summaryrefslogtreecommitdiff
path: root/chromium/third_party/webrtc/modules
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2016-07-14 17:41:05 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2016-08-04 12:37:36 +0000
commit399c965b6064c440ddcf4015f5f8e9d131c7a0a6 (patch)
tree6b06b60ff365abef0e13b3503d593a0df48d20e8 /chromium/third_party/webrtc/modules
parent7366110654eec46f21b6824f302356426f48cd74 (diff)
downloadqtwebengine-chromium-399c965b6064c440ddcf4015f5f8e9d131c7a0a6.tar.gz
BASELINE: Update Chromium to 52.0.2743.76 and Ninja to 1.7.1
Change-Id: I382f51b959689505a60f8b707255ecb344f7d8b4 Reviewed-by: Michael BrĂ¼ning <michael.bruning@qt.io>
Diffstat (limited to 'chromium/third_party/webrtc/modules')
-rw-r--r--chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/BUILD.gn41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi44
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h36
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc127
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc152
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc92
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc329
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c603
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc442
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h222
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h49
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc347
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc61
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h28
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h18
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc24
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc127
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc19
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc18
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc88
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc36
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc46
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi21
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc205
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc489
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc763
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc (renamed from chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc)16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc47
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc135
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc112
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc196
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h29
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc110
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/build_info.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device.gypi5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm340
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm136
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h30
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm217
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m18
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm11
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm39
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm17
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc45
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc21
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/BUILD.gn46
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc725
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h192
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h89
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc336
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc206
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h79
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc219
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc56
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h54
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c)20
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c)11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c)1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c)13
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c)24
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi50
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc69
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc)9
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/debug.proto6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc72
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h108
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc65
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h129
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c13
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h14
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc78
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c)43
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c)19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc30
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h24
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h19
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h6
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc8
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h3
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc157
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc157
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h61
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc56
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h17
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc44
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h26
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc22
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm2
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc1
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h1
-rw-r--r--chromium/third_party/webrtc/modules/include/module_common_types.h74
-rw-r--r--chromium/third_party/webrtc/modules/media_file/media_file_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/modules.gyp127
-rw-r--r--chromium/third_party/webrtc/modules/modules_java.gyp9
-rw-r--r--chromium/third_party/webrtc/modules/modules_java_chromium.gyp8
-rw-r--r--chromium/third_party/webrtc/modules/modules_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests.isolate13
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc54
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober.h18
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h4
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.cc67
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.h45
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc488
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.cc3
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.h5
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc55
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h14
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc8
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc37
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc6
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc76
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc10
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc7
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc17
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc19
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc91
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h41
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h28
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h22
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc29
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc60
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc29
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h19
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc53
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc63
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc36
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc64
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc203
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h75
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc509
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h187
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h56
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h33
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc252
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc48
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc23
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc53
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc118
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h31
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc147
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc65
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc56
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc17
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc9
-rw-r--r--chromium/third_party/webrtc/modules/utility/OWNERS1
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/file_recorder.h4
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/jvm_android.h15
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h6
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/process_thread.h7
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/coder.cc145
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/coder.h60
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_impl.h1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc3
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h23
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/jvm_android.cc16
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc14
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc73
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc52
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h14
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/BUILD.gn13
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/OWNERS3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc160
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc168
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codec_database.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc43
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc30
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc44
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc18
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc27
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h82
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc46
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp21
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc31
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc39
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc126
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h72
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc154
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h83
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc329
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.cc45
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.h40
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc228
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/generic_encoder.h102
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h89
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/include/video_coding.h42
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h49
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc22
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc29
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_opt_util.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization.cc160
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization.h25
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module.cc64
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module.h15
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc87
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc1414
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select.cc953
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select.h356
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select_data.h227
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc1307
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver.cc35
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc60
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc486
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h152
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/timing.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc14
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc196
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h56
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc176
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc124
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc212
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding.gypi11
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc61
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h67
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_receiver.cc158
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_sender.cc90
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/BUILD.gn7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc136
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/brightness_detection.h35
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis.cc281
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis.h87
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc271
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/deflickering.cc402
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/deflickering.h55
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc53
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h17
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/include/video_processing.h44
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc122
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc50
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc100
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc58
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc143
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h13
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc72
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc112
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc94
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc48
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h22
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_decimator.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc524
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_denoiser.h49
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing.gypi7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc111
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h14
-rw-r--r--chromium/third_party/webrtc/modules/video_render/BUILD.gn178
-rw-r--r--chromium/third_party/webrtc/modules/video_render/DEPS5
-rw-r--r--chromium/third_party/webrtc/modules/video_render/OWNERS12
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc316
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h154
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc450
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h95
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc474
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h83
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc397
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h57
-rw-r--r--chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc195
-rw-r--r--chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h128
-rw-r--r--chromium/third_party/webrtc/modules/video_render/i_video_render.h129
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h64
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm330
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h45
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm61
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h87
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm285
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h105
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm170
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h34
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm163
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc261
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h128
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc315
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h96
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc153
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h58
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h33
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm87
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h32
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm55
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc1987
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h178
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc280
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h146
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h141
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm253
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h192
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm1247
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmpbin304182 -> 0 bytes
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc645
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h18
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm69
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.gypi218
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.h268
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_defines.h70
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_impl.cc602
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_impl.h215
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_internal.h27
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc825
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h110
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc1160
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h256
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc337
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h137
660 files changed, 14655 insertions, 29457 deletions
diff --git a/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate b/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate
new file mode 100644
index 00000000000..2122df84730
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'audio_codec_speed_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_audio_codec_speed_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_audio_codec_speed_tests',
+ 'audio_codec_speed_tests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
index 23b5fa46da5..d1f70cf105b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
@@ -39,6 +39,33 @@ if (!build_with_mozilla && !build_with_chromium) {
audio_codec_deps += [ ":red" ]
}
+source_set("audio_decoder_factory_interface") {
+ sources = [
+ "codecs/audio_decoder_factory.h",
+ "codecs/audio_format.cc",
+ "codecs/audio_format.h",
+ ]
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+ deps = [
+ "../..:webrtc_common",
+ ]
+}
+
+source_set("builtin_audio_decoder_factory") {
+ sources = [
+ "codecs/builtin_audio_decoder_factory.cc",
+ "codecs/builtin_audio_decoder_factory.h",
+ ]
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+ deps = [
+ "../..:webrtc_common",
+ ":audio_decoder_factory_interface",
+ ] + audio_codec_deps
+ defines = audio_codec_defines
+}
+
source_set("rent_a_codec") {
sources = [
"acm2/acm_codec_database.cc",
@@ -121,6 +148,7 @@ source_set("audio_decoder_interface") {
public_configs = [ "../..:common_inherited_config" ]
deps = [
"../..:webrtc_common",
+ "../../base:rtc_base_approved",
]
}
@@ -133,6 +161,7 @@ source_set("audio_encoder_interface") {
public_configs = [ "../..:common_inherited_config" ]
deps = [
"../..:webrtc_common",
+ "../../base:rtc_base_approved",
]
}
@@ -147,9 +176,7 @@ source_set("cng") {
sources = [
"codecs/cng/audio_encoder_cng.cc",
"codecs/cng/audio_encoder_cng.h",
- "codecs/cng/cng_helpfuns.c",
- "codecs/cng/cng_helpfuns.h",
- "codecs/cng/webrtc_cng.c",
+ "codecs/cng/webrtc_cng.cc",
"codecs/cng/webrtc_cng.h",
]
@@ -760,6 +787,8 @@ source_set("neteq") {
"neteq/buffer_level_filter.h",
"neteq/comfort_noise.cc",
"neteq/comfort_noise.h",
+ "neteq/cross_correlation.cc",
+ "neteq/cross_correlation.h",
"neteq/decision_logic.cc",
"neteq/decision_logic.h",
"neteq/decision_logic_fax.cc",
@@ -791,6 +820,8 @@ source_set("neteq") {
"neteq/neteq_impl.h",
"neteq/normal.cc",
"neteq/normal.h",
+ "neteq/packet.cc",
+ "neteq/packet.h",
"neteq/packet_buffer.cc",
"neteq/packet_buffer.h",
"neteq/payload_splitter.cc",
@@ -807,6 +838,8 @@ source_set("neteq") {
"neteq/statistics_calculator.h",
"neteq/sync_buffer.cc",
"neteq/sync_buffer.h",
+ "neteq/tick_timer.cc",
+ "neteq/tick_timer.h",
"neteq/time_stretch.cc",
"neteq/time_stretch.h",
"neteq/timestamp_scaler.cc",
@@ -822,9 +855,11 @@ source_set("neteq") {
deps = [
":audio_decoder_interface",
+ ":builtin_audio_decoder_factory",
":cng",
":g711",
":pcm16b",
+ ":rent_a_codec",
"../..:webrtc_common",
"../../common_audio",
"../../system_wrappers",
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
index e4a34a72946..dc8111daf3b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
@@ -158,8 +158,11 @@ void AcmReceiveTestOldApi::Run() {
// Pull audio until time to insert packet.
while (clock_.TimeInMilliseconds() < packet->time_ms()) {
AudioFrame output_frame;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(output_freq_hz_, &output_frame));
+ bool muted;
+ EXPECT_EQ(0,
+ acm_->PlayoutData10Ms(output_freq_hz_, &output_frame, &muted));
ASSERT_EQ(output_freq_hz_, output_frame.sample_rate_hz_);
+ ASSERT_FALSE(muted);
const size_t samples_per_block =
static_cast<size_t>(output_freq_hz_ * 10 / 1000);
EXPECT_EQ(samples_per_block, output_frame.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
index f8bacf8dc7d..8c07631e3a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
@@ -26,7 +26,6 @@
#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -133,11 +132,13 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
return 0;
}
-int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
+int AcmReceiver::GetAudio(int desired_freq_hz,
+ AudioFrame* audio_frame,
+ bool* muted) {
// Accessing members, take the lock.
rtc::CritScope lock(&crit_sect_);
- if (neteq_->GetAudio(audio_frame) != NetEq::kOK) {
+ if (neteq_->GetAudio(audio_frame, muted) != NetEq::kOK) {
LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
index 6fec1ffdda1..f37212c067b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
@@ -82,11 +82,13 @@ class AcmReceiver {
// Output:
// -audio_frame : an audio frame were output data and
// associated parameters are written to.
+ // -muted : if true, the sample data in audio_frame is not
+ // populated, and must be interpreted as all zero.
//
// Return value : 0 if OK.
// -1 if NetEq returned an error.
//
- int GetAudio(int desired_freq_hz, AudioFrame* audio_frame);
+ int GetAudio(int desired_freq_hz, AudioFrame* audio_frame, bool* muted);
//
// Adds a new codec to the NetEq codec database.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
index c39a7cc797f..b57b7ef446e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
@@ -285,7 +285,8 @@ TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) {
const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
InsertOnePacketOfSilence(codec.id);
for (int k = 0; k < num_10ms_frames; ++k) {
- EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame));
+ bool muted;
+ EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame, &muted));
}
EXPECT_EQ(codec.inst.plfreq, receiver_->last_output_sample_rate_hz());
}
@@ -326,13 +327,15 @@ class AcmReceiverTestFaxModeOldApi : public AcmReceiverTestOldApi {
rtc::CheckedDivExact(5 * output_sample_rate_hz, 8000);
AudioFrame frame;
- EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame));
+ bool muted;
+ EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame, &muted));
// Expect timestamp = 0 before first packet is inserted.
EXPECT_EQ(0u, frame.timestamp_);
for (int i = 0; i < 5; ++i) {
InsertOnePacketOfSilence(codec.id);
for (int k = 0; k < num_10ms_frames; ++k) {
- EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame));
+ EXPECT_EQ(0,
+ receiver_->GetAudio(output_sample_rate_hz, &frame, &muted));
EXPECT_EQ(expected_output_ts, frame.timestamp_);
expected_output_ts += 10 * samples_per_ms;
EXPECT_EQ(10 * samples_per_ms, frame.samples_per_channel_);
@@ -340,6 +343,7 @@ class AcmReceiverTestFaxModeOldApi : public AcmReceiverTestOldApi {
EXPECT_EQ(output_channels, frame.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, frame.speech_type_);
EXPECT_EQ(expected_vad_activity, frame.vad_activity_);
+ EXPECT_FALSE(muted);
}
}
}
@@ -388,8 +392,10 @@ TEST_F(AcmReceiverTestOldApi, MAYBE_PostdecodingVad) {
AudioFrame frame;
for (int n = 0; n < kNumPackets; ++n) {
InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ for (int k = 0; k < num_10ms_frames; ++k) {
+ bool muted;
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame, &muted));
+ }
}
EXPECT_EQ(AudioFrame::kVadPassive, frame.vad_activity_);
}
@@ -417,8 +423,10 @@ TEST_F(AcmReceiverTestPostDecodeVadPassiveOldApi, MAYBE_PostdecodingVad) {
AudioFrame frame;
for (int n = 0; n < kNumPackets; ++n) {
InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ for (int k = 0; k < num_10ms_frames; ++k) {
+ bool muted;
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame, &muted));
+ }
}
EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
index cfee3530206..938e39e2e69 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
@@ -47,7 +47,7 @@ class AcmSendTestOldApi : public AudioPacketizationCallback,
// Returns the next encoded packet. Returns NULL if the test duration was
// exceeded. Ownership of the packet is handed over to the caller.
// Inherited from PacketSource.
- Packet* NextPacket();
+ Packet* NextPacket() override;
// Inherited from AudioPacketizationCallback.
int32_t SendData(FrameType frame_type,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
index 254c2f420bf..bc7197d8e79 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
@@ -29,6 +29,18 @@
namespace webrtc {
+namespace {
+
+// Adds a codec usage sample to the histogram.
+void UpdateCodecTypeHistogram(size_t codec_type) {
+ RTC_HISTOGRAM_ENUMERATION(
+ "WebRTC.Audio.Encoder.CodecType", static_cast<int>(codec_type),
+ static_cast<int>(
+ webrtc::AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes));
+}
+
+} // namespace
+
namespace acm2 {
struct EncoderFactory {
@@ -104,7 +116,6 @@ void ConvertEncodedInfoToFragmentationHeader(
class RawAudioEncoderWrapper final : public AudioEncoder {
public:
RawAudioEncoderWrapper(AudioEncoder* enc) : enc_(enc) {}
- size_t MaxEncodedBytes() const override { return enc_->MaxEncodedBytes(); }
int SampleRateHz() const override { return enc_->SampleRateHz(); }
size_t NumChannels() const override { return enc_->NumChannels(); }
int RtpTimestampRateHz() const override { return enc_->RtpTimestampRateHz(); }
@@ -120,13 +131,6 @@ class RawAudioEncoderWrapper final : public AudioEncoder {
rtc::Buffer* encoded) override {
return enc_->Encode(rtp_timestamp, audio, encoded);
}
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override {
- return enc_->EncodeInternal(rtp_timestamp, audio, max_encoded_bytes,
- encoded);
- }
void Reset() override { return enc_->Reset(); }
bool SetFec(bool enable) override { return enc_->SetFec(enable); }
bool SetDtx(bool enable) override { return enc_->SetDtx(enable); }
@@ -193,7 +197,9 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(
first_10ms_data_(false),
first_frame_(true),
packetization_callback_(NULL),
- vad_callback_(NULL) {
+ vad_callback_(NULL),
+ codec_histogram_bins_log_(),
+ number_of_consecutive_empty_packets_(0) {
if (InitializeReceiverSafe() < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot initialize receiver");
@@ -239,6 +245,20 @@ int32_t AudioCodingModuleImpl::Encode(const InputData& input_data) {
}
previous_pltype = previous_pltype_; // Read it while we have the critsect.
+ // Log codec type to histogram once every 500 packets.
+ if (encoded_info.encoded_bytes == 0) {
+ ++number_of_consecutive_empty_packets_;
+ } else {
+ size_t codec_type = static_cast<size_t>(encoded_info.encoder_type);
+ codec_histogram_bins_log_[codec_type] +=
+ number_of_consecutive_empty_packets_ + 1;
+ number_of_consecutive_empty_packets_ = 0;
+ if (codec_histogram_bins_log_[codec_type] >= 500) {
+ codec_histogram_bins_log_[codec_type] -= 500;
+ UpdateCodecTypeHistogram(codec_type);
+ }
+ }
+
RTPFragmentationHeader my_fragmentation;
ConvertEncodedInfoToFragmentationHeader(encoded_info, &my_fragmentation);
FrameType frame_type;
@@ -727,10 +747,12 @@ int AudioCodingModuleImpl::RegisterReceiveCodecUnlocked(
AudioDecoder* isac_decoder = nullptr;
if (STR_CASE_CMP(codec.plname, "isac") == 0) {
- if (!isac_decoder_) {
- isac_decoder_ = isac_factory();
+ std::unique_ptr<AudioDecoder>& saved_isac_decoder =
+ codec.plfreq == 16000 ? isac_decoder_16k_ : isac_decoder_32k_;
+ if (!saved_isac_decoder) {
+ saved_isac_decoder = isac_factory();
}
- isac_decoder = isac_decoder_.get();
+ isac_decoder = saved_isac_decoder.get();
}
return receiver_.AddCodec(*codec_index, codec.pltype, codec.channels,
codec.plfreq, isac_decoder, codec.plname);
@@ -797,9 +819,10 @@ int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
// Get 10 milliseconds of raw audio data to play out.
// Automatic resample to the requested frequency.
int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
- AudioFrame* audio_frame) {
+ AudioFrame* audio_frame,
+ bool* muted) {
// GetAudio always returns 10 ms, at the requested sample rate.
- if (receiver_.GetAudio(desired_freq_hz, audio_frame) != 0) {
+ if (receiver_.GetAudio(desired_freq_hz, audio_frame, muted) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"PlayoutData failed, RecOut Failed");
return -1;
@@ -808,6 +831,14 @@ int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
return 0;
}
+int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
+ AudioFrame* audio_frame) {
+ bool muted;
+ int ret = PlayoutData10Ms(desired_freq_hz, audio_frame, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
+}
+
/////////////////////////////////////////
// Statistics
//
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
index 63dfb810567..c098e62b991 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
@@ -23,6 +23,7 @@
#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/acm2/codec_manager.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
namespace webrtc {
@@ -163,6 +164,9 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
// Get 10 milliseconds of raw audio data to play out, and
// automatic resample to the requested frequency if > 0.
+ int PlayoutData10Ms(int desired_freq_hz,
+ AudioFrame* audio_frame,
+ bool* muted) override;
int PlayoutData10Ms(int desired_freq_hz, AudioFrame* audio_frame) override;
/////////////////////////////////////////
@@ -268,7 +272,8 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
// RegisterEncoder.
std::unique_ptr<AudioEncoder> encoder_stack_ GUARDED_BY(acm_crit_sect_);
- std::unique_ptr<AudioDecoder> isac_decoder_ GUARDED_BY(acm_crit_sect_);
+ std::unique_ptr<AudioDecoder> isac_decoder_16k_ GUARDED_BY(acm_crit_sect_);
+ std::unique_ptr<AudioDecoder> isac_decoder_32k_ GUARDED_BY(acm_crit_sect_);
// This is to keep track of CN instances where we can send DTMFs.
uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
@@ -294,6 +299,10 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
AudioPacketizationCallback* packetization_callback_
GUARDED_BY(callback_crit_sect_);
ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
+
+ int codec_histogram_bins_log_[static_cast<size_t>(
+ AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes)];
+ int number_of_consecutive_empty_packets_;
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
index 6e004f9e28d..470f690ed9c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
@@ -205,7 +205,9 @@ class AudioCodingModuleTestOldApi : public ::testing::Test {
virtual void PullAudio() {
AudioFrame audio_frame;
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
+ bool muted;
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
}
virtual void InsertAudio() {
@@ -296,7 +298,9 @@ TEST_F(AudioCodingModuleTestOldApi, MAYBE_NetEqCalls) {
TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
AudioFrame audio_frame;
const int kSampleRateHz = 32000;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
+ bool muted;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(id_, audio_frame.id_);
EXPECT_EQ(0u, audio_frame.timestamp_);
EXPECT_GT(audio_frame.num_channels_, 0u);
@@ -307,7 +311,8 @@ TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
AudioFrame audio_frame;
- EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
+ bool muted;
+ EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame, &muted));
}
// Checks that the transport callback is invoked once for each speech packet.
@@ -608,7 +613,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
~AcmIsacMtTestOldApi() {}
- void SetUp() {
+ void SetUp() override {
AudioCodingModuleTestOldApi::SetUp();
RegisterCodec(); // Must be called before the threads start below.
@@ -642,7 +647,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
}
- void InsertPacket() {
+ void InsertPacket() override {
int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
if (num_calls > last_packet_number_) {
// Get the new payload out from the callback handler.
@@ -661,7 +666,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
&last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
}
- void InsertAudio() {
+ void InsertAudio() override {
// TODO(kwiberg): Use std::copy here. Might be complications because AFAICS
// this call confuses the number of samples with the number of bytes, and
// ends up copying only half of what it should.
@@ -677,7 +682,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
// This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but
// here it is using the constants defined in this class (i.e., shorter test
// run).
- virtual bool TestDone() {
+ bool TestDone() override {
if (packet_cb_.num_calls() > kNumPackets) {
rtc::CritScope lock(&crit_sect_);
if (pull_audio_count_ > kNumPullCalls) {
@@ -728,7 +733,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
clock_ = fake_clock_.get();
}
- void SetUp() {
+ void SetUp() override {
AudioCodingModuleTestOldApi::SetUp();
// Set up input audio source to read from specified file, loop after 5
// seconds, and deliver blocks of 10 ms.
@@ -757,7 +762,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
codec_registration_thread_.SetPriority(rtc::kRealtimePriority);
}
- void TearDown() {
+ void TearDown() override {
AudioCodingModuleTestOldApi::TearDown();
receive_thread_.Stop();
codec_registration_thread_.Stop();
@@ -806,8 +811,13 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
// Pull audio.
for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) {
AudioFrame audio_frame;
+ bool muted;
EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */,
- &audio_frame));
+ &audio_frame, &muted));
+ if (muted) {
+ ADD_FAILURE();
+ return false;
+ }
fake_clock_->AdvanceTimeMilliseconds(10);
}
rtp_utility_->Forward(&rtp_header_);
@@ -939,34 +949,34 @@ class AcmReceiverBitExactnessOldApi : public ::testing::Test {
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
TEST_F(AcmReceiverBitExactnessOldApi, 8kHzOutput) {
- Run(8000, PlatformChecksum("d9334a99c7f185860028e6f08e5b7390",
- "946803da293ef3fa39242d3059eac491",
- "efb5a07480bad8afb184c4150f4b3f3a",
- "51717ab374871cbfa2c6977ea2aa40f3"),
+ Run(8000, PlatformChecksum("90be25dd9505005aaadf91b77ee31624",
+ "ac6dc4b5bf6d277f693889c4c916882e",
+ "a607f7d0ba98683c9c236217f86aaa6b",
+ "4a54f6ec712bda58484a388e1a332b42"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 16kHzOutput) {
- Run(16000, PlatformChecksum("9ad7d5a5f3c9fac4e880a6fbfd9d3ac8",
- "4fc1b82404ae33511c1cdb385774b2a4",
- "f580bfd4e5e29f0399b61b7512d4e3b4",
- "5b2ae32c590b41d0c601179e14eaae96"),
+ Run(16000, PlatformChecksum("2c713197d41becd52c1ceecbd2b9f687",
+ "130cc2a43063c74197122e3760690e7d",
+ "cdc3d88f6d8e497d4e00c62c0e6dbb3c",
+ "83edb67c157d0e3a0fb9f7d7b1ce5dc7"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 32kHzOutput) {
- Run(32000, PlatformChecksum("08e6085ccb96494b242f0ecc4c8a2dc8",
- "d1f853b1e046c67c9ee186786eaf2124",
- "fdf5166b98c43235978685e40e28fea6",
- "7f620312f2fa74a10048bbb7739d4bf3"),
+ Run(32000, PlatformChecksum("fe5851d43c13df66a7ad30fdb124e62f",
+ "309d24be4b287dc92c340f10a807a11e",
+ "c4a0e0b2e031d62c693af2a9ff4337ac",
+ "4cbfc6ab4d704f5d9b4f10406437fda9"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutput) {
- Run(48000, PlatformChecksum("31343887b7ef70772df733d072b0dd00",
- "f6893278d75dad42ac44bff77f674b33",
- "71f89e87ee1bad594f529d6c036289ad",
- "b64c891e99eccc9ff45541ef67c9e9bf"),
+ Run(48000, PlatformChecksum("a9241f426b4bf2ac650b6d287469a550",
+ "30374fd4a932df942c1b1120e7b724ad",
+ "22242dd832824046d48db9ea8a01f84c",
+ "c7f46bf165400b266d9b57aee02d2747"),
std::vector<ExternalDecoder>());
}
@@ -1021,10 +1031,10 @@ TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutputExternalDecoder) {
std::vector<ExternalDecoder> external_decoders;
external_decoders.push_back(ed);
- Run(48000, PlatformChecksum("31343887b7ef70772df733d072b0dd00",
- "f6893278d75dad42ac44bff77f674b33",
- "71f89e87ee1bad594f529d6c036289ad",
- "b64c891e99eccc9ff45541ef67c9e9bf"),
+ Run(48000, PlatformChecksum("a9241f426b4bf2ac650b6d287469a550",
+ "30374fd4a932df942c1b1120e7b724ad",
+ "22242dd832824046d48db9ea8a01f84c",
+ "c7f46bf165400b266d9b57aee02d2747"),
external_decoders);
EXPECT_CALL(mock_decoder, Die());
@@ -1737,7 +1747,7 @@ class AcmSwitchingOutputFrequencyOldApi : public ::testing::Test,
}
// Inherited from test::AudioSink.
- bool WriteArray(const int16_t* audio, size_t num_samples) {
+ bool WriteArray(const int16_t* audio, size_t num_samples) override {
// Skip checking the first output frame, since it has a number of zeros
// due to how NetEq is initialized.
if (first_output_) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
index 81adf81a83c..f028c45f991 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
@@ -113,7 +113,7 @@ bool CodecManager::RegisterEncoder(const CodecInst& send_codec) {
}
send_codec_inst_ = rtc::Optional<CodecInst>(send_codec);
- codec_stack_params_.speech_encoder.reset(); // Caller must recreate it.
+ recreate_encoder_ = true; // Caller must recreate it.
return true;
}
@@ -190,5 +190,67 @@ bool CodecManager::SetCodecFEC(bool enable_codec_fec) {
return true;
}
+bool CodecManager::MakeEncoder(RentACodec* rac, AudioCodingModule* acm) {
+ RTC_DCHECK(rac);
+ RTC_DCHECK(acm);
+
+ if (!recreate_encoder_) {
+ bool error = false;
+ // Try to re-use the speech encoder we've given to the ACM.
+ acm->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
+ if (!*encoder) {
+ // There is no existing encoder.
+ recreate_encoder_ = true;
+ return;
+ }
+
+ // Extract the speech encoder from the ACM.
+ std::unique_ptr<AudioEncoder> enc = std::move(*encoder);
+ while (true) {
+ auto sub_enc = enc->ReclaimContainedEncoders();
+ if (sub_enc.empty()) {
+ break;
+ }
+ RTC_CHECK_EQ(1u, sub_enc.size());
+
+ // Replace enc with its sub encoder. We need to put the sub encoder in
+ // a temporary first, since otherwise the old value of enc would be
+ // destroyed before the new value got assigned, which would be bad
+ // since the new value is a part of the old value.
+ auto tmp_enc = std::move(sub_enc[0]);
+ enc = std::move(tmp_enc);
+ }
+
+ // Wrap it in a new encoder stack and put it back.
+ codec_stack_params_.speech_encoder = std::move(enc);
+ *encoder = rac->RentEncoderStack(&codec_stack_params_);
+ if (!*encoder) {
+ error = true;
+ }
+ });
+ if (error) {
+ return false;
+ }
+ if (!recreate_encoder_) {
+ return true;
+ }
+ }
+
+ if (!send_codec_inst_) {
+ // We don't have the information we need to create a new speech encoder.
+ // (This is not an error.)
+ return true;
+ }
+
+ codec_stack_params_.speech_encoder = rac->RentEncoder(*send_codec_inst_);
+ auto stack = rac->RentEncoderStack(&codec_stack_params_);
+ if (!stack) {
+ return false;
+ }
+ acm->SetEncoder(std::move(stack));
+ recreate_encoder_ = false;
+ return true;
+}
+
} // namespace acm2
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
index f6c6cd46d2c..b60b7e7bcbd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
@@ -59,31 +59,13 @@ class CodecManager final {
// Uses the provided Rent-A-Codec to create a new encoder stack, if we have a
// complete specification; if so, it is then passed to set_encoder. On error,
// returns false.
- bool MakeEncoder(RentACodec* rac, AudioCodingModule* acm) {
- RTC_DCHECK(rac);
- RTC_DCHECK(acm);
- if (!codec_stack_params_.speech_encoder && send_codec_inst_) {
- // We have no speech encoder, but we have a specification for making one.
- auto enc = rac->RentEncoder(*send_codec_inst_);
- if (!enc)
- return false;
- codec_stack_params_.speech_encoder = std::move(enc);
- }
- auto stack = rac->RentEncoderStack(&codec_stack_params_);
- if (stack) {
- // Give new encoder stack to the ACM.
- acm->SetEncoder(std::move(stack));
- } else {
- // The specification was good but incomplete, so we have no encoder stack
- // to give to the ACM.
- }
- return true;
- }
+ bool MakeEncoder(RentACodec* rac, AudioCodingModule* acm);
private:
rtc::ThreadChecker thread_checker_;
rtc::Optional<CodecInst> send_codec_inst_;
RentACodec::StackParameters codec_stack_params_;
+ bool recreate_encoder_ = true; // Need to recreate encoder?
RTC_DISALLOW_COPY_AND_ASSIGN(CodecManager);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
index 7f1e52030dc..a61f15949d7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
@@ -47,6 +47,59 @@
namespace webrtc {
namespace acm2 {
+rtc::Optional<SdpAudioFormat> RentACodec::NetEqDecoderToSdpAudioFormat(
+ NetEqDecoder nd) {
+ switch (nd) {
+ case NetEqDecoder::kDecoderPCMu:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcmu", 8000, 1));
+ case NetEqDecoder::kDecoderPCMa:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcma", 8000, 1));
+ case NetEqDecoder::kDecoderPCMu_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcmu", 8000, 2));
+ case NetEqDecoder::kDecoderPCMa_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcma", 8000, 2));
+ case NetEqDecoder::kDecoderILBC:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("ilbc", 8000, 1));
+ case NetEqDecoder::kDecoderISAC:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("isac", 16000, 1));
+ case NetEqDecoder::kDecoderISACswb:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("isac", 32000, 1));
+ case NetEqDecoder::kDecoderPCM16B:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 1));
+ case NetEqDecoder::kDecoderPCM16Bwb:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 16000, 1));
+ case NetEqDecoder::kDecoderPCM16Bswb32kHz:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 32000, 1));
+ case NetEqDecoder::kDecoderPCM16Bswb48kHz:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 48000, 1));
+ case NetEqDecoder::kDecoderPCM16B_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 2));
+ case NetEqDecoder::kDecoderPCM16Bwb_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 16000, 2));
+ case NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 32000, 2));
+ case NetEqDecoder::kDecoderPCM16Bswb48kHz_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 48000, 2));
+ case NetEqDecoder::kDecoderPCM16B_5ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 5));
+ case NetEqDecoder::kDecoderG722:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("g722", 8000, 1));
+ case NetEqDecoder::kDecoderG722_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("g722", 8000, 2));
+ case NetEqDecoder::kDecoderOpus:
+ return rtc::Optional<SdpAudioFormat>(
+ SdpAudioFormat("opus", 48000, 2,
+ std::map<std::string, std::string>{{"stereo", "0"}}));
+ case NetEqDecoder::kDecoderOpus_2ch:
+ return rtc::Optional<SdpAudioFormat>(
+ SdpAudioFormat("opus", 48000, 2,
+ std::map<std::string, std::string>{{"stereo", "1"}}));
+
+ default:
+ return rtc::Optional<SdpAudioFormat>();
+ }
+}
+
rtc::Optional<RentACodec::CodecId> RentACodec::CodecIdByParams(
const char* payload_name,
int sampling_freq_hz,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
index a4026acd283..bac37afa963 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
@@ -20,6 +20,7 @@
#include "webrtc/base/optional.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
#include "webrtc/typedefs.h"
@@ -135,6 +136,9 @@ class RentACodec {
kDecoderOpus_2ch,
};
+ static rtc::Optional<SdpAudioFormat> NetEqDecoderToSdpAudioFormat(
+ NetEqDecoder nd);
+
static inline size_t NumberOfCodecs() {
return static_cast<size_t>(CodecId::kNumCodecs);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
index e460853ac10..af2cb3c49dc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
@@ -69,6 +69,50 @@
},
'targets': [
{
+ 'target_name': 'audio_decoder_factory_interface',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ },
+ 'sources': [
+ 'codecs/audio_decoder_factory.h',
+ 'codecs/audio_format.cc',
+ 'codecs/audio_format.h',
+ ],
+ },
+ {
+ 'target_name': 'builtin_audio_decoder_factory',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_codec_defines)',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<@(audio_codec_dependencies)',
+ 'audio_decoder_factory_interface',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ },
+ 'sources': [
+ 'codecs/builtin_audio_decoder_factory.cc',
+ 'codecs/builtin_audio_decoder_factory.h',
+ ],
+ },
+ {
'target_name': 'rent_a_codec',
'type': 'static_library',
'defines': [
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
index e60309a6dfa..0b1a22d8d04 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
@@ -39,17 +39,4 @@
],
},
],
- 'conditions': [
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'audio_codec_speed_tests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_codec_speed_tests_apk',
- ],
- },
- ],
- }],
- ],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
index d2984b97b09..442ddc1e4b8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
@@ -82,11 +82,6 @@ bool AudioDecoder::PacketHasFec(const uint8_t* encoded,
return false;
}
-CNG_dec_inst* AudioDecoder::CngDecoderInstance() {
- FATAL() << "Not a CNG decoder";
- return NULL;
-}
-
AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) {
switch (type) {
case 0: // TODO(hlundin): Both iSAC and Opus return 0 for speech.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
index 81ac8731830..580ddbf74ff 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
@@ -14,7 +14,6 @@
#include <stdlib.h> // NULL
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -94,10 +93,6 @@ class AudioDecoder {
// Returns true if the packet has FEC and false otherwise.
virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
- // If this is a CNG decoder, return the underlying CNG_dec_inst*. If this
- // isn't a CNG decoder, don't call this method.
- virtual CNG_dec_inst* CngDecoderInstance();
-
virtual size_t Channels() const = 0;
protected:
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h
new file mode 100644
index 00000000000..12b97780918
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+
+namespace webrtc {
+
+// A factory that creates AudioDecoders.
+// NOTE: This class is still under development and may change without notice.
+class AudioDecoderFactory {
+ public:
+ virtual ~AudioDecoderFactory() = default;
+
+ virtual std::vector<SdpAudioFormat> GetSupportedFormats() = 0;
+
+ virtual std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc
new file mode 100644
index 00000000000..12a0a4047e8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+
+namespace webrtc {
+
+TEST(AudioDecoderFactoryTest, CreateUnknownDecoder) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("rey", 8000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreatePcmu) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // PCMu supports 8 kHz, and any number of channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 2)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 16000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreatePcma) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // PCMa supports 8 kHz, and any number of channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 2)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 16000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreateIlbc) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // iLBC supports 8 kHz, 1 channel.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 16000, 1)));
+
+ // iLBC actually uses a 16 kHz sample rate instead of the nominal 8 kHz.
+ // TODO(kwiberg): Uncomment this once AudioDecoder has a SampleRateHz method.
+ // std::unique_ptr<AudioDecoder> dec =
+ // adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 1));
+ // EXPECT_EQ(16000, dec->SampleRateHz());
+}
+
+TEST(AudioDecoderFactoryTest, CreateIsac) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // iSAC supports 16 kHz, 1 channel. The float implementation additionally
+ // supports 32 kHz, 1 channel.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 8000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 48000, 1)));
+#ifdef WEBRTC_ARCH_ARM
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1)));
+#else
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1)));
+#endif
+}
+
+TEST(AudioDecoderFactoryTest, CreateL16) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // L16 supports any clock rate, any number of channels.
+ const int clockrates[] = {8000, 16000, 32000, 48000};
+ const int num_channels[] = {1, 2, 3, 4711};
+ for (int clockrate : clockrates) {
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("l16", clockrate, 0)));
+ for (int channels : num_channels) {
+ EXPECT_TRUE(
+ adf->MakeAudioDecoder(SdpAudioFormat("l16", clockrate, channels)));
+ }
+ }
+}
+
+TEST(AudioDecoderFactoryTest, CreateG722) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // g722 supports 8 kHz, 1-2 channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 16000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 32000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreateOpus) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // Opus supports 48 kHz, 2 channels, and wants a "stereo" parameter whose
+ // value is either "0" or "1".
+ for (int hz : {8000, 16000, 32000, 48000}) {
+ for (int channels : {0, 1, 2, 3}) {
+ for (std::string stereo : {"XX", "0", "1", "2"}) {
+ std::map<std::string, std::string> params;
+ if (stereo != "XX") {
+ params["stereo"] = stereo;
+ }
+ bool good =
+ (hz == 48000 && channels == 2 && (stereo == "0" || stereo == "1"));
+ EXPECT_EQ(good, static_cast<bool>(adf->MakeAudioDecoder(SdpAudioFormat(
+ "opus", hz, channels, std::move(params)))));
+ }
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
index 6f793e25314..6b7f5f893fd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
@@ -16,8 +16,13 @@
namespace webrtc {
AudioEncoder::EncodedInfo::EncodedInfo() = default;
-
+AudioEncoder::EncodedInfo::EncodedInfo(const EncodedInfo&) = default;
+AudioEncoder::EncodedInfo::EncodedInfo(EncodedInfo&&) = default;
AudioEncoder::EncodedInfo::~EncodedInfo() = default;
+AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(
+ const EncodedInfo&) = default;
+AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(EncodedInfo&&) =
+ default;
int AudioEncoder::RtpTimestampRateHz() const {
return SampleRateHz();
@@ -37,55 +42,6 @@ AudioEncoder::EncodedInfo AudioEncoder::Encode(
return info;
}
-AudioEncoder::EncodedInfo AudioEncoder::Encode(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) {
- return DEPRECATED_Encode(rtp_timestamp, audio, max_encoded_bytes, encoded);
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::DEPRECATED_Encode(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) {
- TRACE_EVENT0("webrtc", "AudioEncoder::Encode");
- RTC_CHECK_EQ(audio.size(),
- static_cast<size_t>(NumChannels() * SampleRateHz() / 100));
- EncodedInfo info =
- EncodeInternal(rtp_timestamp, audio, max_encoded_bytes, encoded);
- RTC_CHECK_LE(info.encoded_bytes, max_encoded_bytes);
- return info;
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::EncodeImpl(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded)
-{
- EncodedInfo info;
- encoded->AppendData(MaxEncodedBytes(), [&] (rtc::ArrayView<uint8_t> encoded) {
- info = EncodeInternal(rtp_timestamp, audio,
- encoded.size(), encoded.data());
- return info.encoded_bytes;
- });
- return info;
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::EncodeInternal(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded)
-{
- rtc::Buffer temp_buffer;
- EncodedInfo info = EncodeImpl(rtp_timestamp, audio, &temp_buffer);
- RTC_DCHECK_LE(temp_buffer.size(), max_encoded_bytes);
- std::memcpy(encoded, temp_buffer.data(), info.encoded_bytes);
- return info;
-}
-
bool AudioEncoder::SetFec(bool enable) {
return !enable;
}
@@ -104,4 +60,7 @@ void AudioEncoder::SetProjectedPacketLossRate(double fraction) {}
void AudioEncoder::SetTargetBitrate(int target_bps) {}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoder::ReclaimContainedEncoders() { return nullptr; }
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
index 3fdee259ce7..ecc28d96a16 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
@@ -25,12 +25,32 @@ namespace webrtc {
// type must have an implementation of this class.
class AudioEncoder {
public:
+ // Used for UMA logging of codec usage. The same codecs, with the
+ // same values, must be listed in
+ // src/tools/metrics/histograms/histograms.xml in chromium to log
+ // correct values.
+ enum class CodecType {
+ kOther = 0, // Codec not specified, and/or not listed in this enum
+ kOpus = 1,
+ kIsac = 2,
+ kPcmA = 3,
+ kPcmU = 4,
+ kG722 = 5,
+ kIlbc = 6,
+
+ // Number of histogram bins in the UMA logging of codec types. The
+ // total number of different codecs that are logged cannot exceed this
+ // number.
+ kMaxLoggedAudioCodecTypes
+ };
+
struct EncodedInfoLeaf {
size_t encoded_bytes = 0;
uint32_t encoded_timestamp = 0;
int payload_type = 0;
bool send_even_if_empty = false;
bool speech = true;
+ CodecType encoder_type = CodecType::kOther;
};
// This is the main struct for auxiliary encoding information. Each encoded
@@ -45,21 +65,17 @@ class AudioEncoder {
// vector.
struct EncodedInfo : public EncodedInfoLeaf {
EncodedInfo();
+ EncodedInfo(const EncodedInfo&);
+ EncodedInfo(EncodedInfo&&);
~EncodedInfo();
+ EncodedInfo& operator=(const EncodedInfo&);
+ EncodedInfo& operator=(EncodedInfo&&);
std::vector<EncodedInfoLeaf> redundant;
};
virtual ~AudioEncoder() = default;
- // Returns the maximum number of bytes that can be produced by the encoder
- // at each Encode() call. The caller can use the return value to determine
- // the size of the buffer that needs to be allocated. This value is allowed
- // to depend on encoder parameters like bitrate, frame size etc., so if
- // any of these change, the caller of Encode() is responsible for checking
- // that the buffer is large enough by calling MaxEncodedBytes() again.
- virtual size_t MaxEncodedBytes() const = 0;
-
// Returns the input sample rate in Hz and the number of input channels.
// These are constants set at instantiation time.
virtual int SampleRateHz() const = 0;
@@ -95,33 +111,6 @@ class AudioEncoder {
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded);
- // Deprecated interface to Encode (remove eventually, bug 5591). May incur a
- // copy. The encoder produces zero or more bytes of output in |encoded| and
- // returns additional encoding information. The caller is responsible for
- // making sure that |max_encoded_bytes| is not smaller than the number of
- // bytes actually produced by the encoder.
- RTC_DEPRECATED EncodedInfo Encode(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
- EncodedInfo DEPRECATED_Encode(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
- // Deprecated interface EncodeInternal (see bug 5591). May incur a copy.
- // Subclasses implement this to perform the actual encoding. Called by
- // Encode(). By default, this is implemented as a call to the newer
- // EncodeImpl() that accepts an rtc::Buffer instead of a raw pointer.
- // That version is protected, so see below. At least one of EncodeInternal
- // or EncodeImpl _must_ be implemented by a subclass.
- virtual EncodedInfo EncodeInternal(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
// Resets the encoder to its starting state, discarding any input that has
// been fed to the encoder but not yet emitted in a packet.
virtual void Reset() = 0;
@@ -160,15 +149,21 @@ class AudioEncoder {
// implementation does the latter).
virtual void SetTargetBitrate(int target_bps);
+ // Causes this encoder to let go of any other encoders it contains, and
+ // returns a pointer to an array where they are stored (which is required to
+ // live as long as this encoder). Unless the returned array is empty, you may
+ // not call any methods on this encoder afterwards, except for the
+ // destructor. The default implementation just returns an empty array.
+ // NOTE: This method is subject to change. Do not call or override it.
+ virtual rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+ ReclaimContainedEncoders();
+
protected:
// Subclasses implement this to perform the actual encoding. Called by
- // Encode(). For compatibility reasons, this is implemented by default as a
- // call to the older interface EncodeInternal(). At least one of
- // EncodeInternal or EncodeImpl _must_ be implemented by a
- // subclass. Preferably this one.
+ // Encode().
virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded);
+ rtc::Buffer* encoded) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_ENCODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc
deleted file mode 100644
index 71ffcde323b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
-
-using ::testing::_;
-using ::testing::Invoke;
-using ::testing::Return;
-
-namespace webrtc {
-
-TEST(AudioEncoderTest, EncodeInternalRedirectsOk) {
- const size_t kPayloadSize = 16;
- const uint8_t payload[kPayloadSize] =
- {0xf, 0xe, 0xd, 0xc, 0xb, 0xa, 0x9, 0x8,
- 0x7, 0x6, 0x5, 0x4, 0x3, 0x2, 0x1, 0x0};
-
- MockAudioEncoderDeprecated old_impl;
- MockAudioEncoder new_impl;
- MockAudioEncoderBase* impls[] = { &old_impl, &new_impl };
- for (auto* impl : impls) {
- EXPECT_CALL(*impl, Die());
- EXPECT_CALL(*impl, MaxEncodedBytes())
- .WillRepeatedly(Return(kPayloadSize * 2));
- EXPECT_CALL(*impl, NumChannels()).WillRepeatedly(Return(1));
- EXPECT_CALL(*impl, SampleRateHz()).WillRepeatedly(Return(8000));
- }
-
- EXPECT_CALL(old_impl, EncodeInternal(_, _, _, _)).WillOnce(
- Invoke(MockAudioEncoderDeprecated::CopyEncoding(payload)));
-
- EXPECT_CALL(new_impl, EncodeImpl(_, _, _)).WillOnce(
- Invoke(MockAudioEncoder::CopyEncoding(payload)));
-
- int16_t audio[80];
- uint8_t output_array[kPayloadSize * 2];
- rtc::Buffer output_buffer;
-
- AudioEncoder* old_encoder = &old_impl;
- AudioEncoder* new_encoder = &new_impl;
- auto old_info = old_encoder->Encode(0, audio, &output_buffer);
- auto new_info = new_encoder->DEPRECATED_Encode(0, audio,
- kPayloadSize * 2,
- output_array);
-
- EXPECT_EQ(old_info.encoded_bytes, kPayloadSize);
- EXPECT_EQ(new_info.encoded_bytes, kPayloadSize);
- EXPECT_EQ(output_buffer.size(), kPayloadSize);
-
- for (size_t i = 0; i != kPayloadSize; ++i) {
- EXPECT_EQ(output_buffer.data()[i], payload[i]);
- EXPECT_EQ(output_array[i], payload[i]);
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc
new file mode 100644
index 00000000000..bb69cbdb2f7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+
+namespace webrtc {
+
+SdpAudioFormat::SdpAudioFormat() = default;
+SdpAudioFormat::SdpAudioFormat(const SdpAudioFormat&) = default;
+SdpAudioFormat::SdpAudioFormat(SdpAudioFormat&&) = default;
+
+SdpAudioFormat::SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels)
+ : name(name), clockrate_hz(clockrate_hz), num_channels(num_channels) {}
+
+SdpAudioFormat::SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels,
+ Parameters&& param)
+ : name(name),
+ clockrate_hz(clockrate_hz),
+ num_channels(num_channels),
+ parameters(std::move(param)) {}
+
+SdpAudioFormat::~SdpAudioFormat() = default;
+SdpAudioFormat& SdpAudioFormat::operator=(const SdpAudioFormat&) = default;
+SdpAudioFormat& SdpAudioFormat::operator=(SdpAudioFormat&&) = default;
+
+void swap(SdpAudioFormat& a, SdpAudioFormat& b) {
+ using std::swap;
+ swap(a.name, b.name);
+ swap(a.clockrate_hz, b.clockrate_hz);
+ swap(a.num_channels, b.num_channels);
+ swap(a.parameters, b.parameters);
+}
+
+std::ostream& operator<<(std::ostream& os, const SdpAudioFormat& saf) {
+ os << "{name: " << saf.name;
+ os << ", clockrate_hz: " << saf.clockrate_hz;
+ os << ", num_channels: " << saf.num_channels;
+ os << ", parameters: {";
+ const char* sep = "";
+ for (const auto& kv : saf.parameters) {
+ os << sep << kv.first << ": " << kv.second;
+ sep = ", ";
+ }
+ os << "}}";
+ return os;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h
new file mode 100644
index 00000000000..61c0dd9f6fa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
+
+#include <map>
+#include <ostream>
+#include <string>
+#include <utility>
+
+namespace webrtc {
+
+// SDP specification for a single audio codec.
+// NOTE: This class is still under development and may change without notice.
+struct SdpAudioFormat {
+ using Parameters = std::map<std::string, std::string>;
+
+ // TODO(kwiberg): Get rid of the default constructor when rtc::Optional no
+ // longer requires it.
+ SdpAudioFormat();
+ SdpAudioFormat(const SdpAudioFormat&);
+ SdpAudioFormat(SdpAudioFormat&&);
+ SdpAudioFormat(const char* name, int clockrate_hz, int num_channels);
+ SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels,
+ Parameters&& param);
+ ~SdpAudioFormat();
+
+ SdpAudioFormat& operator=(const SdpAudioFormat&);
+ SdpAudioFormat& operator=(SdpAudioFormat&&);
+
+ std::string name;
+ int clockrate_hz;
+ int num_channels;
+ Parameters parameters;
+ // Parameters feedback_parameters; ??
+};
+
+void swap(SdpAudioFormat& a, SdpAudioFormat& b);
+std::ostream& operator<<(std::ostream& os, const SdpAudioFormat& saf);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc
new file mode 100644
index 00000000000..4c7445672ac
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
+#ifdef WEBRTC_CODEC_G722
+#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#endif
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
+
+namespace webrtc {
+
+namespace {
+
+struct NamedDecoderConstructor {
+ const char* name;
+ std::unique_ptr<AudioDecoder> (*constructor)(const SdpAudioFormat&);
+};
+
+std::unique_ptr<AudioDecoder> Unique(AudioDecoder* d) {
+ return std::unique_ptr<AudioDecoder>(d);
+}
+
+// TODO(kwiberg): These factory functions should probably be moved to each
+// decoder.
+NamedDecoderConstructor decoder_constructors[] = {
+ {"pcmu",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels >= 1
+ ? Unique(new AudioDecoderPcmU(format.num_channels))
+ : nullptr;
+ }},
+ {"pcma",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels >= 1
+ ? Unique(new AudioDecoderPcmA(format.num_channels))
+ : nullptr;
+ }},
+#ifdef WEBRTC_CODEC_ILBC
+ {"ilbc",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels == 1
+ ? Unique(new AudioDecoderIlbc)
+ : nullptr;
+ }},
+#endif
+#if defined(WEBRTC_CODEC_ISACFX)
+ {"isac",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 16000 && format.num_channels == 1
+ ? Unique(new AudioDecoderIsacFix)
+ : nullptr;
+ }},
+#elif defined(WEBRTC_CODEC_ISAC)
+ {"isac",
+ [](const SdpAudioFormat& format) {
+ return (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) &&
+ format.num_channels == 1
+ ? Unique(new AudioDecoderIsac)
+ : nullptr;
+ }},
+#endif
+ {"l16",
+ [](const SdpAudioFormat& format) {
+ return format.num_channels >= 1
+ ? Unique(new AudioDecoderPcm16B(format.num_channels))
+ : nullptr;
+ }},
+#ifdef WEBRTC_CODEC_G722
+ {"g722",
+ [](const SdpAudioFormat& format) {
+ if (format.clockrate_hz == 8000) {
+ if (format.num_channels == 1)
+ return Unique(new AudioDecoderG722);
+ if (format.num_channels == 2)
+ return Unique(new AudioDecoderG722Stereo);
+ }
+ return Unique(nullptr);
+ }},
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ {"opus",
+ [](const SdpAudioFormat& format) {
+ rtc::Optional<int> num_channels = [&] {
+ auto stereo = format.parameters.find("stereo");
+ if (stereo != format.parameters.end()) {
+ if (stereo->second == "0") {
+ return rtc::Optional<int>(1);
+ } else if (stereo->second == "1") {
+ return rtc::Optional<int>(2);
+ }
+ }
+ return rtc::Optional<int>();
+ }();
+ return format.clockrate_hz == 48000 && format.num_channels == 2 &&
+ num_channels
+ ? Unique(new AudioDecoderOpus(*num_channels))
+ : nullptr;
+ }},
+#endif
+};
+
+class BuiltinAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ std::vector<SdpAudioFormat> GetSupportedFormats() override {
+ FATAL() << "Not implemented yet!";
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) override {
+ for (const auto& dc : decoder_constructors) {
+ if (STR_CASE_CMP(format.name.c_str(), dc.name) == 0) {
+ return std::unique_ptr<AudioDecoder>(dc.constructor(format));
+ }
+ }
+ return nullptr;
+ }
+};
+
+} // namespace
+
+std::unique_ptr<AudioDecoderFactory> CreateBuiltinAudioDecoderFactory() {
+ return std::unique_ptr<AudioDecoderFactory>(new BuiltinAudioDecoderFactory);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h
new file mode 100644
index 00000000000..7234c160b5c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+
+#include <memory>
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+
+namespace webrtc {
+
+// Creates a new factory that can create the built-in types of audio decoders.
+// NOTE: This function is still under development and may change without notice.
+std::unique_ptr<AudioDecoderFactory> CreateBuiltinAudioDecoderFactory();
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
index 3b48131a754..d2edcb5c265 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
@@ -21,33 +21,10 @@ namespace {
const int kMaxFrameSizeMs = 60;
-std::unique_ptr<CNG_enc_inst, CngInstDeleter> CreateCngInst(
- int sample_rate_hz,
- int sid_frame_interval_ms,
- int num_cng_coefficients) {
- CNG_enc_inst* ci;
- RTC_CHECK_EQ(0, WebRtcCng_CreateEnc(&ci));
- std::unique_ptr<CNG_enc_inst, CngInstDeleter> cng_inst(ci);
- RTC_CHECK_EQ(0,
- WebRtcCng_InitEnc(cng_inst.get(), sample_rate_hz,
- sid_frame_interval_ms, num_cng_coefficients));
- return cng_inst;
-}
-
} // namespace
AudioEncoderCng::Config::Config() = default;
-
-// TODO(kwiberg): =default this when Visual Studio learns to handle it.
-AudioEncoderCng::Config::Config(Config&& c)
- : num_channels(c.num_channels),
- payload_type(c.payload_type),
- speech_encoder(std::move(c.speech_encoder)),
- vad_mode(c.vad_mode),
- sid_frame_interval_ms(c.sid_frame_interval_ms),
- num_cng_coefficients(c.num_cng_coefficients),
- vad(c.vad) {}
-
+AudioEncoderCng::Config::Config(Config&&) = default;
AudioEncoderCng::Config::~Config() = default;
bool AudioEncoderCng::Config::IsOk() const {
@@ -75,20 +52,14 @@ AudioEncoderCng::AudioEncoderCng(Config&& config)
sid_frame_interval_ms_(config.sid_frame_interval_ms),
last_frame_active_(true),
vad_(config.vad ? std::unique_ptr<Vad>(config.vad)
- : CreateVad(config.vad_mode)) {
- cng_inst_ = CreateCngInst(SampleRateHz(), sid_frame_interval_ms_,
- num_cng_coefficients_);
+ : CreateVad(config.vad_mode)),
+ cng_encoder_(new ComfortNoiseEncoder(SampleRateHz(),
+ sid_frame_interval_ms_,
+ num_cng_coefficients_)) {
}
AudioEncoderCng::~AudioEncoderCng() = default;
-size_t AudioEncoderCng::MaxEncodedBytes() const {
- const size_t max_encoded_bytes_active = speech_encoder_->MaxEncodedBytes();
- const size_t max_encoded_bytes_passive =
- rtc::CheckedDivExact(kMaxFrameSizeMs, 10) * SamplesPer10msFrame();
- return std::max(max_encoded_bytes_active, max_encoded_bytes_passive);
-}
-
int AudioEncoderCng::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
@@ -187,8 +158,9 @@ void AudioEncoderCng::Reset() {
rtp_timestamps_.clear();
last_frame_active_ = true;
vad_->Reset();
- cng_inst_ = CreateCngInst(SampleRateHz(), sid_frame_interval_ms_,
- num_cng_coefficients_);
+ cng_encoder_.reset(
+ new ComfortNoiseEncoder(SampleRateHz(), sid_frame_interval_ms_,
+ num_cng_coefficients_));
}
bool AudioEncoderCng::SetFec(bool enable) {
@@ -215,38 +187,38 @@ void AudioEncoderCng::SetTargetBitrate(int bits_per_second) {
speech_encoder_->SetTargetBitrate(bits_per_second);
}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoderCng::ReclaimContainedEncoders() {
+ return rtc::ArrayView<std::unique_ptr<AudioEncoder>>(&speech_encoder_, 1);
+}
+
AudioEncoder::EncodedInfo AudioEncoderCng::EncodePassive(
size_t frames_to_encode,
rtc::Buffer* encoded) {
bool force_sid = last_frame_active_;
bool output_produced = false;
const size_t samples_per_10ms_frame = SamplesPer10msFrame();
- const size_t bytes_to_encode = frames_to_encode * samples_per_10ms_frame;
AudioEncoder::EncodedInfo info;
- encoded->AppendData(bytes_to_encode, [&] (rtc::ArrayView<uint8_t> encoded) {
- for (size_t i = 0; i < frames_to_encode; ++i) {
- // It's important not to pass &info.encoded_bytes directly to
- // WebRtcCng_Encode(), since later loop iterations may return zero in
- // that value, in which case we don't want to overwrite any value from
- // an earlier iteration.
- size_t encoded_bytes_tmp = 0;
- RTC_CHECK_GE(
- WebRtcCng_Encode(cng_inst_.get(),
- &speech_buffer_[i * samples_per_10ms_frame],
- samples_per_10ms_frame, encoded.data(),
- &encoded_bytes_tmp, force_sid),
- 0);
- if (encoded_bytes_tmp > 0) {
- RTC_CHECK(!output_produced);
- info.encoded_bytes = encoded_bytes_tmp;
- output_produced = true;
- force_sid = false;
- }
- }
-
- return info.encoded_bytes;
- });
+ for (size_t i = 0; i < frames_to_encode; ++i) {
+ // It's important not to pass &info.encoded_bytes directly to
+ // WebRtcCng_Encode(), since later loop iterations may return zero in
+ // that value, in which case we don't want to overwrite any value from
+ // an earlier iteration.
+ size_t encoded_bytes_tmp =
+ cng_encoder_->Encode(
+ rtc::ArrayView<const int16_t>(
+ &speech_buffer_[i * samples_per_10ms_frame],
+ samples_per_10ms_frame),
+ force_sid, encoded);
+
+ if (encoded_bytes_tmp > 0) {
+ RTC_CHECK(!output_produced);
+ info.encoded_bytes = encoded_bytes_tmp;
+ output_produced = true;
+ force_sid = false;
+ }
+ }
info.encoded_timestamp = rtp_timestamps_.front();
info.payload_type = cng_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
index 1384cd511ee..a895e69de44 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
@@ -21,11 +21,6 @@
namespace webrtc {
-// Deleter for use with unique_ptr.
-struct CngInstDeleter {
- void operator()(CNG_enc_inst* ptr) const { WebRtcCng_FreeEnc(ptr); }
-};
-
class Vad;
class AudioEncoderCng final : public AudioEncoder {
@@ -52,7 +47,6 @@ class AudioEncoderCng final : public AudioEncoder {
explicit AudioEncoderCng(Config&& config);
~AudioEncoderCng() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -69,6 +63,8 @@ class AudioEncoderCng final : public AudioEncoder {
void SetMaxPlaybackRate(int frequency_hz) override;
void SetProjectedPacketLossRate(double fraction) override;
void SetTargetBitrate(int target_bps) override;
+ rtc::ArrayView<std::unique_ptr<AudioEncoder>> ReclaimContainedEncoders()
+ override;
private:
EncodedInfo EncodePassive(size_t frames_to_encode,
@@ -85,7 +81,7 @@ class AudioEncoderCng final : public AudioEncoder {
std::vector<uint32_t> rtp_timestamps_;
bool last_frame_active_;
std::unique_ptr<Vad> vad_;
- std::unique_ptr<CNG_enc_inst, CngInstDeleter> cng_inst_;
+ std::unique_ptr<ComfortNoiseEncoder> cng_encoder_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCng);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
index 8f30d783ae4..eb6c6d3607e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
@@ -12,6 +12,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/vad/mock/mock_vad.h"
#include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"
#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
@@ -25,7 +26,6 @@ using ::testing::Invoke;
namespace webrtc {
namespace {
-static const size_t kMockMaxEncodedBytes = 1000;
static const size_t kMaxNumSamples = 48 * 10 * 2; // 10 ms @ 48 kHz stereo.
static const size_t kMockReturnEncodedBytes = 17;
static const int kCngPayloadType = 18;
@@ -74,8 +74,6 @@ class AudioEncoderCngTest : public ::testing::Test {
// as long as it is smaller than 10.
EXPECT_CALL(*mock_encoder_, Max10MsFramesInAPacket())
.WillOnce(Return(1u));
- EXPECT_CALL(*mock_encoder_, MaxEncodedBytes())
- .WillRepeatedly(Return(kMockMaxEncodedBytes));
}
cng_.reset(new AudioEncoderCng(std::move(config)));
}
@@ -90,8 +88,8 @@ class AudioEncoderCngTest : public ::testing::Test {
}
// Expect |num_calls| calls to the encoder, all successful. The last call
- // claims to have encoded |kMockMaxEncodedBytes| bytes, and all the preceding
- // ones 0 bytes.
+ // claims to have encoded |kMockReturnEncodedBytes| bytes, and all the
+ // preceding ones 0 bytes.
void ExpectEncodeCalls(size_t num_calls) {
InSequence s;
AudioEncoder::EncodedInfo info;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
index c020f4740d4..bbff9f8edfe 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
@@ -18,9 +18,7 @@
'sources': [
'audio_encoder_cng.cc',
'audio_encoder_cng.h',
- 'cng_helpfuns.c',
- 'cng_helpfuns.h',
- 'webrtc_cng.c',
+ 'webrtc_cng.cc',
'webrtc_cng.h',
],
},
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c
deleted file mode 100644
index bc08d431a69..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "cng_helpfuns.h"
-
-#include "signal_processing_library.h"
-#include "webrtc/typedefs.h"
-#include "webrtc_cng.h"
-
-/* Values in |k| are Q15, and |a| Q12. */
-void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a) {
- int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1];
- int16_t *aptr, *aptr2, *anyptr;
- const int16_t *kptr;
- int m, i;
-
- kptr = k;
- *a = 4096; /* i.e., (Word16_MAX >> 3) + 1 */
- *any = *a;
- a[1] = (*k + 4) >> 3;
- for (m = 1; m < useOrder; m++) {
- kptr++;
- aptr = a;
- aptr++;
- aptr2 = &a[m];
- anyptr = any;
- anyptr++;
-
- any[m + 1] = (*kptr + 4) >> 3;
- for (i = 0; i < m; i++) {
- *anyptr++ = (*aptr++) +
- (int16_t)((((int32_t)(*aptr2--) * (int32_t) * kptr) + 16384) >> 15);
- }
-
- aptr = a;
- anyptr = any;
- for (i = 0; i < (m + 2); i++) {
- *aptr++ = *anyptr++;
- }
- }
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h
deleted file mode 100644
index a553a7615e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
-
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
index 1061dca69ac..95132a96178 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -7,11 +7,12 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
namespace webrtc {
@@ -21,7 +22,7 @@ enum {
kSidLongIntervalUpdate = 10000
};
-enum {
+enum : size_t {
kCNGNumParamsLow = 0,
kCNGNumParamsNormal = 8,
kCNGNumParamsHigh = WEBRTC_CNG_MAX_LPC_ORDER,
@@ -35,19 +36,13 @@ enum {
class CngTest : public ::testing::Test {
protected:
- CngTest();
virtual void SetUp();
- CNG_enc_inst* cng_enc_inst_;
- CNG_dec_inst* cng_dec_inst_;
+ void TestCngEncode(int sample_rate_hz, int quality);
+
int16_t speech_data_[640]; // Max size of CNG internal buffers.
};
-CngTest::CngTest()
- : cng_enc_inst_(NULL),
- cng_dec_inst_(NULL) {
-}
-
void CngTest::SetUp() {
FILE* input_file;
const std::string file_name =
@@ -60,289 +55,187 @@ void CngTest::SetUp() {
input_file = NULL;
}
-// Test failing Create.
-TEST_F(CngTest, CngCreateFail) {
- // Test to see that an invalid pointer is caught.
- EXPECT_EQ(-1, WebRtcCng_CreateEnc(NULL));
- EXPECT_EQ(-1, WebRtcCng_CreateDec(NULL));
-}
-
-// Test normal Create.
-TEST_F(CngTest, CngCreate) {
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_TRUE(cng_enc_inst_ != NULL);
- EXPECT_TRUE(cng_dec_inst_ != NULL);
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+void CngTest::TestCngEncode(int sample_rate_hz, int quality) {
+ const size_t num_samples_10ms = rtc::CheckedDivExact(sample_rate_hz, 100);
+ rtc::Buffer sid_data;
+
+ ComfortNoiseEncoder cng_encoder(sample_rate_hz, kSidNormalIntervalUpdate,
+ quality);
+ EXPECT_EQ(0U, cng_encoder.Encode(rtc::ArrayView<const int16_t>(
+ speech_data_, num_samples_10ms),
+ kNoSid, &sid_data));
+ EXPECT_EQ(static_cast<size_t>(quality + 1),
+ cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, num_samples_10ms),
+ kForceSid, &sid_data));
}
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Create CNG encoder, init with faulty values, free CNG encoder.
TEST_F(CngTest, CngInitFail) {
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
-
// Call with too few parameters.
- EXPECT_EQ(-1, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsLow));
- EXPECT_EQ(6130, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
-
+ EXPECT_DEATH({ ComfortNoiseEncoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsLow); }, "");
// Call with too many parameters.
- EXPECT_EQ(-1, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsTooHigh));
- EXPECT_EQ(6130, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
-
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
-}
-
-TEST_F(CngTest, CngEncode) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
-
- // 8 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 80, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 80, sid_data, &number_bytes, kForceSid));
-
- // 16 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
-
- // 32 kHz, Max number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 32000, kSidNormalIntervalUpdate,
- kCNGNumParamsHigh));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 320, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsHigh + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 320, sid_data, &number_bytes, kForceSid));
-
- // 48 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 48000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 480, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 480, sid_data, &number_bytes, kForceSid));
-
- // 64 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 64000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 640, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 640, sid_data, &number_bytes, kForceSid));
-
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+ EXPECT_DEATH({ ComfortNoiseEncoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsTooHigh); }, "");
}
// Encode Cng with too long input vector.
TEST_F(CngTest, CngEncodeTooLong) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create and init encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
+ rtc::Buffer sid_data;
+ // Create encoder.
+ ComfortNoiseEncoder cng_encoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
// Run encoder with too much data.
- EXPECT_EQ(-1, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 641, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(6140, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
+ EXPECT_DEATH(
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 641),
+ kNoSid, &sid_data),
+ "");
+}
+#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode8000) {
+ TestCngEncode(8000, kCNGNumParamsNormal);
}
-// Call encode without calling init.
-TEST_F(CngTest, CngEncodeNoInit) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+TEST_F(CngTest, CngEncode16000) {
+ TestCngEncode(16000, kCNGNumParamsNormal);
+}
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
+TEST_F(CngTest, CngEncode32000) {
+ TestCngEncode(32000, kCNGNumParamsHigh);
+}
- // Run encoder without calling init.
- EXPECT_EQ(-1, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 640, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(6120, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode48000) {
+ TestCngEncode(48000, kCNGNumParamsNormal);
+}
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode64000) {
+ TestCngEncode(64000, kCNGNumParamsNormal);
}
// Update SID parameters, for both 9 and 16 parameters.
TEST_F(CngTest, CngUpdateSid) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Run normal Encode and UpdateSid.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
+ cng_decoder.UpdateSid(sid_data);
// Reinit with new length.
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsHigh));
- WebRtcCng_InitDec(cng_dec_inst_);
+ cng_encoder.Reset(16000, kSidNormalIntervalUpdate, kCNGNumParamsHigh);
+ cng_decoder.Reset();
// Expect 0 because of unstable parameters after switching length.
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kForceSid));
- EXPECT_EQ(kCNGNumParamsHigh + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_ + 160, 160, sid_data, &number_bytes,
- kForceSid));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(0U,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
+ EXPECT_EQ(
+ kCNGNumParamsHigh + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_ + 160, 160),
+ kForceSid, &sid_data));
+ cng_decoder.UpdateSid(
+ rtc::ArrayView<const uint8_t>(sid_data.data(), kCNGNumParamsNormal + 1));
}
// Update SID parameters, with wrong parameters or without calling decode.
TEST_F(CngTest, CngUpdateSidErroneous) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
+ rtc::Buffer sid_data;
// Encode.
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
-
- // Update Sid before initializing decoder.
- EXPECT_EQ(-1, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
- EXPECT_EQ(6220, WebRtcCng_GetErrorCodeDec(cng_dec_inst_));
-
- // Initialize decoder.
- WebRtcCng_InitDec(cng_dec_inst_);
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
// First run with valid parameters, then with too many CNG parameters.
// The function will operate correctly by only reading the maximum number of
// parameters, skipping the extra.
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsTooHigh + 1));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, sid_data.size());
+ cng_decoder.UpdateSid(sid_data);
+
+ // Make sure the input buffer is large enough. Since Encode() appends data, we
+ // need to set the size manually only afterwards, or the buffer will be bigger
+ // than anticipated.
+ sid_data.SetSize(kCNGNumParamsTooHigh + 1);
+ cng_decoder.UpdateSid(sid_data);
}
// Test to generate cng data, by forcing SID. Both normal and faulty condition.
TEST_F(CngTest, CngGenerate) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ rtc::Buffer sid_data;
int16_t out_data[640];
- size_t number_bytes;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Normal Encode.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
// Normal UpdateSid.
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
+ cng_decoder.UpdateSid(sid_data);
// Two normal Generate, one with new_period.
- EXPECT_EQ(0, WebRtcCng_Generate(cng_dec_inst_, out_data, 640, 1));
- EXPECT_EQ(0, WebRtcCng_Generate(cng_dec_inst_, out_data, 640, 0));
+ EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 640), 1));
+ EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 640), 0));
// Call Genereate with too much data.
- EXPECT_EQ(-1, WebRtcCng_Generate(cng_dec_inst_, out_data, 641, 0));
- EXPECT_EQ(6140, WebRtcCng_GetErrorCodeDec(cng_dec_inst_));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_FALSE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 641), 0));
}
// Test automatic SID.
TEST_F(CngTest, CngAutoSid) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Normal Encode, 100 msec, where no SID data should be generated.
for (int i = 0; i < 10; i++) {
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
+ EXPECT_EQ(0U, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
// We have reached 100 msec, and SID data should be generated.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kNoSid));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
// Test automatic SID, with very short interval.
TEST_F(CngTest, CngAutoSidShort) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidShortIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidShortIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// First call will never generate SID, unless forced to.
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
+ EXPECT_EQ(0U, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
// Normal Encode, 100 msec, SID data should be generated all the time.
for (int i = 0; i < 10; i++) {
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kNoSid));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c
deleted file mode 100644
index 8dddc5c717d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c
+++ /dev/null
@@ -1,603 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc_cng.h"
-
-#include <string.h>
-#include <stdlib.h>
-
-#include "cng_helpfuns.h"
-#include "signal_processing_library.h"
-
-typedef struct WebRtcCngDecoder_ {
- uint32_t dec_seed;
- int32_t dec_target_energy;
- int32_t dec_used_energy;
- int16_t dec_target_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_used_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_filtstate[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_Efiltstate[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_EfiltstateLow[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_order;
- int16_t dec_target_scale_factor; /* Q29 */
- int16_t dec_used_scale_factor; /* Q29 */
- int16_t target_scale_factor; /* Q13 */
- int16_t errorcode;
- int16_t initflag;
-} WebRtcCngDecoder;
-
-typedef struct WebRtcCngEncoder_ {
- size_t enc_nrOfCoefs;
- int enc_sampfreq;
- int16_t enc_interval;
- int16_t enc_msSinceSID;
- int32_t enc_Energy;
- int16_t enc_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int32_t enc_corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- uint32_t enc_seed;
- int16_t errorcode;
- int16_t initflag;
-} WebRtcCngEncoder;
-
-const int32_t WebRtcCng_kDbov[94] = {
- 1081109975, 858756178, 682134279, 541838517, 430397633, 341876992,
- 271562548, 215709799, 171344384, 136103682, 108110997, 85875618,
- 68213428, 54183852, 43039763, 34187699, 27156255, 21570980,
- 17134438, 13610368, 10811100, 8587562, 6821343, 5418385,
- 4303976, 3418770, 2715625, 2157098, 1713444, 1361037,
- 1081110, 858756, 682134, 541839, 430398, 341877,
- 271563, 215710, 171344, 136104, 108111, 85876,
- 68213, 54184, 43040, 34188, 27156, 21571,
- 17134, 13610, 10811, 8588, 6821, 5418,
- 4304, 3419, 2716, 2157, 1713, 1361,
- 1081, 859, 682, 542, 430, 342,
- 272, 216, 171, 136, 108, 86,
- 68, 54, 43, 34, 27, 22,
- 17, 14, 11, 9, 7, 5,
- 4, 3, 3, 2, 2, 1,
- 1, 1, 1, 1
-};
-
-const int16_t WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
- 32702, 32636, 32570, 32505, 32439, 32374,
- 32309, 32244, 32179, 32114, 32049, 31985
-};
-
-/****************************************************************************
- * WebRtcCng_CreateEnc/Dec(...)
- *
- * These functions create an instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst) {
- if (cng_inst != NULL) {
- *cng_inst = (CNG_enc_inst*) malloc(sizeof(WebRtcCngEncoder));
- if (*cng_inst != NULL) {
- (*(WebRtcCngEncoder**) cng_inst)->errorcode = 0;
- (*(WebRtcCngEncoder**) cng_inst)->initflag = 0;
-
- /* Needed to get the right function pointers in SPLIB. */
- WebRtcSpl_Init();
-
- return 0;
- } else {
- /* The memory could not be allocated. */
- return -1;
- }
- } else {
- /* The input pointer is invalid (NULL). */
- return -1;
- }
-}
-
-int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst) {
- if (cng_inst != NULL ) {
- *cng_inst = (CNG_dec_inst*) malloc(sizeof(WebRtcCngDecoder));
- if (*cng_inst != NULL ) {
- (*(WebRtcCngDecoder**) cng_inst)->errorcode = 0;
- (*(WebRtcCngDecoder**) cng_inst)->initflag = 0;
-
- /* Needed to get the right function pointers in SPLIB. */
- WebRtcSpl_Init();
-
- return 0;
- } else {
- /* The memory could not be allocated */
- return -1;
- }
- } else {
- /* The input pointer is invalid (NULL). */
- return -1;
- }
-}
-
-/****************************************************************************
- * WebRtcCng_InitEnc/Dec(...)
- *
- * This function initializes a instance
- *
- * Input:
- * - cng_inst : Instance that should be initialized
- *
- * - fs : 8000 for narrowband and 16000 for wideband
- * - interval : generate SID data every interval ms
- * - quality : TBD
- *
- * Output:
- * - cng_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
- int16_t quality) {
- int i;
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
- memset(inst, 0, sizeof(WebRtcCngEncoder));
-
- /* Check LPC order */
- if (quality > WEBRTC_CNG_MAX_LPC_ORDER || quality <= 0) {
- inst->errorcode = CNG_DISALLOWED_LPC_ORDER;
- return -1;
- }
-
- inst->enc_sampfreq = fs;
- inst->enc_interval = interval;
- inst->enc_nrOfCoefs = quality;
- inst->enc_msSinceSID = 0;
- inst->enc_seed = 7777; /* For debugging only. */
- inst->enc_Energy = 0;
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER + 1); i++) {
- inst->enc_reflCoefs[i] = 0;
- inst->enc_corrVector[i] = 0;
- }
- inst->initflag = 1;
-
- return 0;
-}
-
-void WebRtcCng_InitDec(CNG_dec_inst* cng_inst) {
- int i;
-
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
-
- memset(inst, 0, sizeof(WebRtcCngDecoder));
- inst->dec_seed = 7777; /* For debugging only. */
- inst->dec_order = 5;
- inst->dec_target_scale_factor = 0;
- inst->dec_used_scale_factor = 0;
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER + 1); i++) {
- inst->dec_filtstate[i] = 0;
- inst->dec_target_reflCoefs[i] = 0;
- inst->dec_used_reflCoefs[i] = 0;
- }
- inst->dec_target_reflCoefs[0] = 0;
- inst->dec_used_reflCoefs[0] = 0;
- inst->dec_used_energy = 0;
- inst->initflag = 1;
-}
-
-/****************************************************************************
- * WebRtcCng_FreeEnc/Dec(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst) {
- free(cng_inst);
- return 0;
-}
-
-int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst) {
- free(cng_inst);
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_Encode(...)
- *
- * These functions analyzes background noise
- *
- * Input:
- * - cng_inst : Pointer to created instance
- * - speech : Signal (noise) to be analyzed
- * - nrOfSamples : Size of speech vector
- * - bytesOut : Nr of bytes to transmit, might be 0
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
- size_t nrOfSamples, uint8_t* SIDdata,
- size_t* bytesOut, int16_t forceSID) {
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
-
- int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int32_t corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t hanningW[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t ReflBeta = 19661; /* 0.6 in q15. */
- int16_t ReflBetaComp = 13107; /* 0.4 in q15. */
- int32_t outEnergy;
- int outShifts;
- size_t i;
- int stab;
- int acorrScale;
- size_t index;
- size_t ind, factor;
- int32_t* bptr;
- int32_t blo, bhi;
- int16_t negate;
- const int16_t* aptr;
- int16_t speechBuf[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
-
- /* Check if encoder initiated. */
- if (inst->initflag != 1) {
- inst->errorcode = CNG_ENCODER_NOT_INITIATED;
- return -1;
- }
-
- /* Check framesize. */
- if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
- inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
- return -1;
- }
-
- for (i = 0; i < nrOfSamples; i++) {
- speechBuf[i] = speech[i];
- }
-
- factor = nrOfSamples;
-
- /* Calculate energy and a coefficients. */
- outEnergy = WebRtcSpl_Energy(speechBuf, nrOfSamples, &outShifts);
- while (outShifts > 0) {
- /* We can only do 5 shifts without destroying accuracy in
- * division factor. */
- if (outShifts > 5) {
- outEnergy <<= (outShifts - 5);
- outShifts = 5;
- } else {
- factor /= 2;
- outShifts--;
- }
- }
- outEnergy = WebRtcSpl_DivW32W16(outEnergy, (int16_t)factor);
-
- if (outEnergy > 1) {
- /* Create Hanning Window. */
- WebRtcSpl_GetHanningWindow(hanningW, nrOfSamples / 2);
- for (i = 0; i < (nrOfSamples / 2); i++)
- hanningW[nrOfSamples - i - 1] = hanningW[i];
-
- WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, nrOfSamples,
- 14);
-
- WebRtcSpl_AutoCorrelation(speechBuf, nrOfSamples, inst->enc_nrOfCoefs,
- corrVector, &acorrScale);
-
- if (*corrVector == 0)
- *corrVector = WEBRTC_SPL_WORD16_MAX;
-
- /* Adds the bandwidth expansion. */
- aptr = WebRtcCng_kCorrWindow;
- bptr = corrVector;
-
- /* (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700). */
- for (ind = 0; ind < inst->enc_nrOfCoefs; ind++) {
- /* The below code multiplies the 16 b corrWindow values (Q15) with
- * the 32 b corrvector (Q0) and shifts the result down 15 steps. */
- negate = *bptr < 0;
- if (negate)
- *bptr = -*bptr;
-
- blo = (int32_t) * aptr * (*bptr & 0xffff);
- bhi = ((blo >> 16) & 0xffff)
- + ((int32_t)(*aptr++) * ((*bptr >> 16) & 0xffff));
- blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
-
- *bptr = (((bhi >> 16) & 0x7fff) << 17) | ((uint32_t) blo >> 15);
- if (negate)
- *bptr = -*bptr;
- bptr++;
- }
- /* End of bandwidth expansion. */
-
- stab = WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs,
- inst->enc_nrOfCoefs);
-
- if (!stab) {
- /* Disregard from this frame */
- *bytesOut = 0;
- return 0;
- }
-
- } else {
- for (i = 0; i < inst->enc_nrOfCoefs; i++)
- refCs[i] = 0;
- }
-
- if (forceSID) {
- /* Read instantaneous values instead of averaged. */
- for (i = 0; i < inst->enc_nrOfCoefs; i++)
- inst->enc_reflCoefs[i] = refCs[i];
- inst->enc_Energy = outEnergy;
- } else {
- /* Average history with new values. */
- for (i = 0; i < (inst->enc_nrOfCoefs); i++) {
- inst->enc_reflCoefs[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->enc_reflCoefs[i], ReflBeta, 15);
- inst->enc_reflCoefs[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- refCs[i], ReflBetaComp, 15);
- }
- inst->enc_Energy = (outEnergy >> 2) + (inst->enc_Energy >> 1)
- + (inst->enc_Energy >> 2);
- }
-
- if (inst->enc_Energy < 1) {
- inst->enc_Energy = 1;
- }
-
- if ((inst->enc_msSinceSID > (inst->enc_interval - 1)) || forceSID) {
-
- /* Search for best dbov value. */
- index = 0;
- for (i = 1; i < 93; i++) {
- /* Always round downwards. */
- if ((inst->enc_Energy - WebRtcCng_kDbov[i]) > 0) {
- index = i;
- break;
- }
- }
- if ((i == 93) && (index == 0))
- index = 94;
- SIDdata[0] = (uint8_t)index;
-
- /* Quantize coefficients with tweak for WebRtc implementation of RFC3389. */
- if (inst->enc_nrOfCoefs == WEBRTC_CNG_MAX_LPC_ORDER) {
- for (i = 0; i < inst->enc_nrOfCoefs; i++) {
- /* Q15 to Q7 with rounding. */
- SIDdata[i + 1] = ((inst->enc_reflCoefs[i] + 128) >> 8);
- }
- } else {
- for (i = 0; i < inst->enc_nrOfCoefs; i++) {
- /* Q15 to Q7 with rounding. */
- SIDdata[i + 1] = (127 + ((inst->enc_reflCoefs[i] + 128) >> 8));
- }
- }
-
- inst->enc_msSinceSID = 0;
- *bytesOut = inst->enc_nrOfCoefs + 1;
-
- inst->enc_msSinceSID +=
- (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
- return (int)(inst->enc_nrOfCoefs + 1);
- } else {
- inst->enc_msSinceSID +=
- (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
- *bytesOut = 0;
- return 0;
- }
-}
-
-/****************************************************************************
- * WebRtcCng_UpdateSid(...)
- *
- * These functions updates the CN state, when a new SID packet arrives
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - SID : SID packet, all headers removed
- * - length : Length in bytes of SID packet
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
- size_t length) {
-
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
- int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER];
- int32_t targetEnergy;
- int i;
-
- if (inst->initflag != 1) {
- inst->errorcode = CNG_DECODER_NOT_INITIATED;
- return -1;
- }
-
- /* Throw away reflection coefficients of higher order than we can handle. */
- if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
- length = WEBRTC_CNG_MAX_LPC_ORDER + 1;
-
- inst->dec_order = (int16_t)length - 1;
-
- if (SID[0] > 93)
- SID[0] = 93;
- targetEnergy = WebRtcCng_kDbov[SID[0]];
- /* Take down target energy to 75%. */
- targetEnergy = targetEnergy >> 1;
- targetEnergy += targetEnergy >> 2;
-
- inst->dec_target_energy = targetEnergy;
-
- /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389. */
- if (inst->dec_order == WEBRTC_CNG_MAX_LPC_ORDER) {
- for (i = 0; i < (inst->dec_order); i++) {
- refCs[i] = SID[i + 1] << 8; /* Q7 to Q15*/
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
- } else {
- for (i = 0; i < (inst->dec_order); i++) {
- refCs[i] = (SID[i + 1] - 127) << 8; /* Q7 to Q15. */
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
- }
-
- for (i = (inst->dec_order); i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
- refCs[i] = 0;
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_Generate(...)
- *
- * These functions generates CN data when needed
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - outData : pointer to area to write CN data
- * - nrOfSamples : How much data to generate
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
- size_t nrOfSamples, int16_t new_period) {
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
-
- size_t i;
- int16_t excitation[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t low[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t lpPoly[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t ReflBetaStd = 26214; /* 0.8 in q15. */
- int16_t ReflBetaCompStd = 6553; /* 0.2 in q15. */
- int16_t ReflBetaNewP = 19661; /* 0.6 in q15. */
- int16_t ReflBetaCompNewP = 13107; /* 0.4 in q15. */
- int16_t Beta, BetaC, tmp1, tmp2, tmp3;
- int32_t targetEnergy;
- int16_t En;
- int16_t temp16;
-
- if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
- inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
- return -1;
- }
-
- if (new_period) {
- inst->dec_used_scale_factor = inst->dec_target_scale_factor;
- Beta = ReflBetaNewP;
- BetaC = ReflBetaCompNewP;
- } else {
- Beta = ReflBetaStd;
- BetaC = ReflBetaCompStd;
- }
-
- /* Here we use a 0.5 weighting, should possibly be modified to 0.6. */
- tmp1 = inst->dec_used_scale_factor << 2; /* Q13->Q15 */
- tmp2 = inst->dec_target_scale_factor << 2; /* Q13->Q15 */
- tmp3 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp1, Beta, 15);
- tmp3 += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp2, BetaC, 15);
- inst->dec_used_scale_factor = tmp3 >> 2; /* Q15->Q13 */
-
- inst->dec_used_energy = inst->dec_used_energy >> 1;
- inst->dec_used_energy += inst->dec_target_energy >> 1;
-
- /* Do the same for the reflection coeffs. */
- for (i = 0; i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
- inst->dec_used_reflCoefs[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_used_reflCoefs[i], Beta, 15);
- inst->dec_used_reflCoefs[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_target_reflCoefs[i], BetaC, 15);
- }
-
- /* Compute the polynomial coefficients. */
- WebRtcCng_K2a16(inst->dec_used_reflCoefs, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
-
-
- targetEnergy = inst->dec_used_energy;
-
- /* Calculate scaling factor based on filter energy. */
- En = 8192; /* 1.0 in Q13. */
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER); i++) {
-
- /* Floating point value for reference.
- E *= 1.0 - (inst->dec_used_reflCoefs[i] / 32768.0) *
- (inst->dec_used_reflCoefs[i] / 32768.0);
- */
-
- /* Same in fixed point. */
- /* K(i).^2 in Q15. */
- temp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_used_reflCoefs[i], inst->dec_used_reflCoefs[i], 15);
- /* 1 - K(i).^2 in Q15. */
- temp16 = 0x7fff - temp16;
- En = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(En, temp16, 15);
- }
-
- /* float scaling= sqrt(E * inst->dec_target_energy / (1 << 24)); */
-
- /* Calculate sqrt(En * target_energy / excitation energy) */
- targetEnergy = WebRtcSpl_Sqrt(inst->dec_used_energy);
-
- En = (int16_t) WebRtcSpl_Sqrt(En) << 6;
- En = (En * 3) >> 1; /* 1.5 estimates sqrt(2). */
- inst->dec_used_scale_factor = (int16_t)((En * targetEnergy) >> 12);
-
- /* Generate excitation. */
- /* Excitation energy per sample is 2.^24 - Q13 N(0,1). */
- for (i = 0; i < nrOfSamples; i++) {
- excitation[i] = WebRtcSpl_RandN(&inst->dec_seed) >> 1;
- }
-
- /* Scale to correct energy. */
- WebRtcSpl_ScaleVector(excitation, excitation, inst->dec_used_scale_factor,
- nrOfSamples, 13);
-
- /* |lpPoly| - Coefficients in Q12.
- * |excitation| - Speech samples.
- * |nst->dec_filtstate| - State preservation.
- * |outData| - Filtered speech samples. */
- WebRtcSpl_FilterAR(lpPoly, WEBRTC_CNG_MAX_LPC_ORDER + 1, excitation,
- nrOfSamples, inst->dec_filtstate, WEBRTC_CNG_MAX_LPC_ORDER,
- inst->dec_filtstateLow, WEBRTC_CNG_MAX_LPC_ORDER, outData,
- low, nrOfSamples);
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_GetErrorCodeEnc/Dec(...)
- *
- * This functions can be used to check the error code of a CNG instance. When
- * a function returns -1 a error code will be set for that instance. The
- * function below extract the code of the last error that occured in the
- * specified instance.
- *
- * Input:
- * - CNG_inst : CNG enc/dec instance
- *
- * Return value : Error code
- */
-int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst) {
- /* Typecast pointer to real structure. */
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
- return inst->errorcode;
-}
-
-int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst) {
- /* Typecast pointer to real structure. */
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
- return inst->errorcode;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc
new file mode 100644
index 00000000000..b4da260dba2
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc
@@ -0,0 +1,442 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+
+#include <algorithm>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+namespace {
+
+const size_t kCngMaxOutsizeOrder = 640;
+
+// TODO(ossu): Rename the left-over WebRtcCng according to style guide.
+void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a);
+
+const int32_t WebRtcCng_kDbov[94] = {
+ 1081109975, 858756178, 682134279, 541838517, 430397633, 341876992,
+ 271562548, 215709799, 171344384, 136103682, 108110997, 85875618,
+ 68213428, 54183852, 43039763, 34187699, 27156255, 21570980,
+ 17134438, 13610368, 10811100, 8587562, 6821343, 5418385,
+ 4303976, 3418770, 2715625, 2157098, 1713444, 1361037,
+ 1081110, 858756, 682134, 541839, 430398, 341877,
+ 271563, 215710, 171344, 136104, 108111, 85876,
+ 68213, 54184, 43040, 34188, 27156, 21571,
+ 17134, 13610, 10811, 8588, 6821, 5418,
+ 4304, 3419, 2716, 2157, 1713, 1361,
+ 1081, 859, 682, 542, 430, 342,
+ 272, 216, 171, 136, 108, 86,
+ 68, 54, 43, 34, 27, 22,
+ 17, 14, 11, 9, 7, 5,
+ 4, 3, 3, 2, 2, 1,
+ 1, 1, 1, 1
+};
+
+const int16_t WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
+ 32702, 32636, 32570, 32505, 32439, 32374,
+ 32309, 32244, 32179, 32114, 32049, 31985
+};
+
+} // namespace
+
+ComfortNoiseDecoder::ComfortNoiseDecoder() {
+ /* Needed to get the right function pointers in SPLIB. */
+ WebRtcSpl_Init();
+ Reset();
+}
+
+void ComfortNoiseDecoder::Reset() {
+ dec_seed_ = 7777; /* For debugging only. */
+ dec_target_energy_ = 0;
+ dec_used_energy_ = 0;
+ for (auto& c : dec_target_reflCoefs_)
+ c = 0;
+ for (auto& c : dec_used_reflCoefs_)
+ c = 0;
+ for (auto& c : dec_filtstate_)
+ c = 0;
+ for (auto& c : dec_filtstateLow_)
+ c = 0;
+ dec_order_ = 5;
+ dec_target_scale_factor_ = 0;
+ dec_used_scale_factor_ = 0;
+}
+
+void ComfortNoiseDecoder::UpdateSid(rtc::ArrayView<const uint8_t> sid) {
+ int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER];
+ int32_t targetEnergy;
+ size_t length = sid.size();
+ /* Throw away reflection coefficients of higher order than we can handle. */
+ if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
+ length = WEBRTC_CNG_MAX_LPC_ORDER + 1;
+
+ dec_order_ = static_cast<uint16_t>(length - 1);
+
+ uint8_t sid0 = std::min<uint8_t>(sid[0], 93);
+ targetEnergy = WebRtcCng_kDbov[sid0];
+ /* Take down target energy to 75%. */
+ targetEnergy = targetEnergy >> 1;
+ targetEnergy += targetEnergy >> 2;
+
+ dec_target_energy_ = targetEnergy;
+
+ /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389. */
+ if (dec_order_ == WEBRTC_CNG_MAX_LPC_ORDER) {
+ for (size_t i = 0; i < (dec_order_); i++) {
+ refCs[i] = sid[i + 1] << 8; /* Q7 to Q15*/
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+ } else {
+ for (size_t i = 0; i < (dec_order_); i++) {
+ refCs[i] = (sid[i + 1] - 127) << 8; /* Q7 to Q15. */
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+ }
+
+ for (size_t i = (dec_order_); i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
+ refCs[i] = 0;
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+}
+
+bool ComfortNoiseDecoder::Generate(rtc::ArrayView<int16_t> out_data,
+ bool new_period) {
+ int16_t excitation[kCngMaxOutsizeOrder];
+ int16_t low[kCngMaxOutsizeOrder];
+ int16_t lpPoly[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t ReflBetaStd = 26214; /* 0.8 in q15. */
+ int16_t ReflBetaCompStd = 6553; /* 0.2 in q15. */
+ int16_t ReflBetaNewP = 19661; /* 0.6 in q15. */
+ int16_t ReflBetaCompNewP = 13107; /* 0.4 in q15. */
+ int16_t Beta, BetaC, tmp1, tmp2, tmp3;
+ int32_t targetEnergy;
+ int16_t En;
+ int16_t temp16;
+ const size_t num_samples = out_data.size();
+
+ if (num_samples > kCngMaxOutsizeOrder) {
+ return false;
+ }
+
+ if (new_period) {
+ dec_used_scale_factor_ = dec_target_scale_factor_;
+ Beta = ReflBetaNewP;
+ BetaC = ReflBetaCompNewP;
+ } else {
+ Beta = ReflBetaStd;
+ BetaC = ReflBetaCompStd;
+ }
+
+ /* Here we use a 0.5 weighting, should possibly be modified to 0.6. */
+ tmp1 = dec_used_scale_factor_ << 2; /* Q13->Q15 */
+ tmp2 = dec_target_scale_factor_ << 2; /* Q13->Q15 */
+ tmp3 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp1, Beta, 15);
+ tmp3 += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp2, BetaC, 15);
+ dec_used_scale_factor_ = tmp3 >> 2; /* Q15->Q13 */
+
+ dec_used_energy_ = dec_used_energy_ >> 1;
+ dec_used_energy_ += dec_target_energy_ >> 1;
+
+ /* Do the same for the reflection coeffs. */
+ for (size_t i = 0; i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
+ dec_used_reflCoefs_[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_used_reflCoefs_[i], Beta, 15);
+ dec_used_reflCoefs_[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_target_reflCoefs_[i], BetaC, 15);
+ }
+
+ /* Compute the polynomial coefficients. */
+ WebRtcCng_K2a16(dec_used_reflCoefs_, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
+
+
+ targetEnergy = dec_used_energy_;
+
+ /* Calculate scaling factor based on filter energy. */
+ En = 8192; /* 1.0 in Q13. */
+ for (size_t i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER); i++) {
+ /* Floating point value for reference.
+ E *= 1.0 - (dec_used_reflCoefs_[i] / 32768.0) *
+ (dec_used_reflCoefs_[i] / 32768.0);
+ */
+
+ /* Same in fixed point. */
+ /* K(i).^2 in Q15. */
+ temp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_used_reflCoefs_[i], dec_used_reflCoefs_[i], 15);
+ /* 1 - K(i).^2 in Q15. */
+ temp16 = 0x7fff - temp16;
+ En = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(En, temp16, 15);
+ }
+
+ /* float scaling= sqrt(E * dec_target_energy_ / (1 << 24)); */
+
+ /* Calculate sqrt(En * target_energy / excitation energy) */
+ targetEnergy = WebRtcSpl_Sqrt(dec_used_energy_);
+
+ En = (int16_t) WebRtcSpl_Sqrt(En) << 6;
+ En = (En * 3) >> 1; /* 1.5 estimates sqrt(2). */
+ dec_used_scale_factor_ = (int16_t)((En * targetEnergy) >> 12);
+
+ /* Generate excitation. */
+ /* Excitation energy per sample is 2.^24 - Q13 N(0,1). */
+ for (size_t i = 0; i < num_samples; i++) {
+ excitation[i] = WebRtcSpl_RandN(&dec_seed_) >> 1;
+ }
+
+ /* Scale to correct energy. */
+ WebRtcSpl_ScaleVector(excitation, excitation, dec_used_scale_factor_,
+ num_samples, 13);
+
+ /* |lpPoly| - Coefficients in Q12.
+ * |excitation| - Speech samples.
+ * |nst->dec_filtstate| - State preservation.
+ * |out_data| - Filtered speech samples. */
+ WebRtcSpl_FilterAR(lpPoly, WEBRTC_CNG_MAX_LPC_ORDER + 1, excitation,
+ num_samples, dec_filtstate_, WEBRTC_CNG_MAX_LPC_ORDER,
+ dec_filtstateLow_, WEBRTC_CNG_MAX_LPC_ORDER,
+ out_data.data(), low, num_samples);
+
+ return true;
+}
+
+ComfortNoiseEncoder::ComfortNoiseEncoder(int fs, int interval, int quality)
+ : enc_nrOfCoefs_(quality),
+ enc_sampfreq_(fs),
+ enc_interval_(interval),
+ enc_msSinceSid_(0),
+ enc_Energy_(0),
+ enc_reflCoefs_{0},
+ enc_corrVector_{0},
+ enc_seed_(7777) /* For debugging only. */ {
+ RTC_CHECK(quality <= WEBRTC_CNG_MAX_LPC_ORDER && quality > 0);
+ /* Needed to get the right function pointers in SPLIB. */
+ WebRtcSpl_Init();
+}
+
+void ComfortNoiseEncoder::Reset(int fs, int interval, int quality) {
+ RTC_CHECK(quality <= WEBRTC_CNG_MAX_LPC_ORDER && quality > 0);
+ enc_nrOfCoefs_ = quality;
+ enc_sampfreq_ = fs;
+ enc_interval_ = interval;
+ enc_msSinceSid_ = 0;
+ enc_Energy_ = 0;
+ for (auto& c : enc_reflCoefs_)
+ c = 0;
+ for (auto& c : enc_corrVector_)
+ c = 0;
+ enc_seed_ = 7777; /* For debugging only. */
+}
+
+size_t ComfortNoiseEncoder::Encode(rtc::ArrayView<const int16_t> speech,
+ bool force_sid,
+ rtc::Buffer* output) {
+ int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int32_t corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t hanningW[kCngMaxOutsizeOrder];
+ int16_t ReflBeta = 19661; /* 0.6 in q15. */
+ int16_t ReflBetaComp = 13107; /* 0.4 in q15. */
+ int32_t outEnergy;
+ int outShifts;
+ size_t i;
+ int stab;
+ int acorrScale;
+ size_t index;
+ size_t ind, factor;
+ int32_t* bptr;
+ int32_t blo, bhi;
+ int16_t negate;
+ const int16_t* aptr;
+ int16_t speechBuf[kCngMaxOutsizeOrder];
+
+ const size_t num_samples = speech.size();
+ RTC_CHECK_LE(num_samples, static_cast<size_t>(kCngMaxOutsizeOrder));
+
+ for (i = 0; i < num_samples; i++) {
+ speechBuf[i] = speech[i];
+ }
+
+ factor = num_samples;
+
+ /* Calculate energy and a coefficients. */
+ outEnergy = WebRtcSpl_Energy(speechBuf, num_samples, &outShifts);
+ while (outShifts > 0) {
+ /* We can only do 5 shifts without destroying accuracy in
+ * division factor. */
+ if (outShifts > 5) {
+ outEnergy <<= (outShifts - 5);
+ outShifts = 5;
+ } else {
+ factor /= 2;
+ outShifts--;
+ }
+ }
+ outEnergy = WebRtcSpl_DivW32W16(outEnergy, (int16_t)factor);
+
+ if (outEnergy > 1) {
+ /* Create Hanning Window. */
+ WebRtcSpl_GetHanningWindow(hanningW, num_samples / 2);
+ for (i = 0; i < (num_samples / 2); i++)
+ hanningW[num_samples - i - 1] = hanningW[i];
+
+ WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, num_samples,
+ 14);
+
+ WebRtcSpl_AutoCorrelation(speechBuf, num_samples, enc_nrOfCoefs_,
+ corrVector, &acorrScale);
+
+ if (*corrVector == 0)
+ *corrVector = WEBRTC_SPL_WORD16_MAX;
+
+ /* Adds the bandwidth expansion. */
+ aptr = WebRtcCng_kCorrWindow;
+ bptr = corrVector;
+
+ /* (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700). */
+ for (ind = 0; ind < enc_nrOfCoefs_; ind++) {
+ /* The below code multiplies the 16 b corrWindow values (Q15) with
+ * the 32 b corrvector (Q0) and shifts the result down 15 steps. */
+ negate = *bptr < 0;
+ if (negate)
+ *bptr = -*bptr;
+
+ blo = (int32_t) * aptr * (*bptr & 0xffff);
+ bhi = ((blo >> 16) & 0xffff)
+ + ((int32_t)(*aptr++) * ((*bptr >> 16) & 0xffff));
+ blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
+
+ *bptr = (((bhi >> 16) & 0x7fff) << 17) | ((uint32_t) blo >> 15);
+ if (negate)
+ *bptr = -*bptr;
+ bptr++;
+ }
+ /* End of bandwidth expansion. */
+
+ stab = WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs,
+ enc_nrOfCoefs_);
+
+ if (!stab) {
+ /* Disregard from this frame */
+ return 0;
+ }
+
+ } else {
+ for (i = 0; i < enc_nrOfCoefs_; i++)
+ refCs[i] = 0;
+ }
+
+ if (force_sid) {
+ /* Read instantaneous values instead of averaged. */
+ for (i = 0; i < enc_nrOfCoefs_; i++)
+ enc_reflCoefs_[i] = refCs[i];
+ enc_Energy_ = outEnergy;
+ } else {
+ /* Average history with new values. */
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ enc_reflCoefs_[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ enc_reflCoefs_[i], ReflBeta, 15);
+ enc_reflCoefs_[i] +=
+ (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(refCs[i], ReflBetaComp, 15);
+ }
+ enc_Energy_ =
+ (outEnergy >> 2) + (enc_Energy_ >> 1) + (enc_Energy_ >> 2);
+ }
+
+ if (enc_Energy_ < 1) {
+ enc_Energy_ = 1;
+ }
+
+ if ((enc_msSinceSid_ > (enc_interval_ - 1)) || force_sid) {
+ /* Search for best dbov value. */
+ index = 0;
+ for (i = 1; i < 93; i++) {
+ /* Always round downwards. */
+ if ((enc_Energy_ - WebRtcCng_kDbov[i]) > 0) {
+ index = i;
+ break;
+ }
+ }
+ if ((i == 93) && (index == 0))
+ index = 94;
+
+ const size_t output_coefs = enc_nrOfCoefs_ + 1;
+ output->AppendData(output_coefs, [&] (rtc::ArrayView<uint8_t> output) {
+ output[0] = (uint8_t)index;
+
+ /* Quantize coefficients with tweak for WebRtc implementation of
+ * RFC3389. */
+ if (enc_nrOfCoefs_ == WEBRTC_CNG_MAX_LPC_ORDER) {
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ /* Q15 to Q7 with rounding. */
+ output[i + 1] = ((enc_reflCoefs_[i] + 128) >> 8);
+ }
+ } else {
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ /* Q15 to Q7 with rounding. */
+ output[i + 1] = (127 + ((enc_reflCoefs_[i] + 128) >> 8));
+ }
+ }
+
+ return output_coefs;
+ });
+
+ enc_msSinceSid_ =
+ static_cast<int16_t>((1000 * num_samples) / enc_sampfreq_);
+ return output_coefs;
+ } else {
+ enc_msSinceSid_ +=
+ static_cast<int16_t>((1000 * num_samples) / enc_sampfreq_);
+ return 0;
+ }
+}
+
+namespace {
+/* Values in |k| are Q15, and |a| Q12. */
+void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a) {
+ int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1];
+ int16_t* aptr;
+ int16_t* aptr2;
+ int16_t* anyptr;
+ const int16_t* kptr;
+ int m, i;
+
+ kptr = k;
+ *a = 4096; /* i.e., (Word16_MAX >> 3) + 1 */
+ *any = *a;
+ a[1] = (*k + 4) >> 3;
+ for (m = 1; m < useOrder; m++) {
+ kptr++;
+ aptr = a;
+ aptr++;
+ aptr2 = &a[m];
+ anyptr = any;
+ anyptr++;
+
+ any[m + 1] = (*kptr + 4) >> 3;
+ for (i = 0; i < m; i++) {
+ *anyptr++ =
+ (*aptr++) +
+ (int16_t)((((int32_t)(*aptr2--) * (int32_t)*kptr) + 16384) >> 15);
+ }
+
+ aptr = a;
+ anyptr = any;
+ for (i = 0; i < (m + 2); i++) {
+ *aptr++ = *anyptr++;
+ }
+ }
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
index 64bea1e26f6..fb0a53df270 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
@@ -12,152 +12,88 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
-#include <stddef.h>
-#include "webrtc/typedefs.h"
+#include <cstddef>
-#ifdef __cplusplus
-extern "C" {
-#endif
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/typedefs.h"
#define WEBRTC_CNG_MAX_LPC_ORDER 12
-#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
-
-/* Define Error codes. */
-
-/* 6100 Encoder */
-#define CNG_ENCODER_NOT_INITIATED 6120
-#define CNG_DISALLOWED_LPC_ORDER 6130
-#define CNG_DISALLOWED_FRAME_SIZE 6140
-#define CNG_DISALLOWED_SAMPLING_FREQUENCY 6150
-/* 6200 Decoder */
-#define CNG_DECODER_NOT_INITIATED 6220
-
-typedef struct WebRtcCngEncInst CNG_enc_inst;
-typedef struct WebRtcCngDecInst CNG_dec_inst;
-
-/****************************************************************************
- * WebRtcCng_CreateEnc/Dec(...)
- *
- * These functions create an instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst);
-int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst);
-
-/****************************************************************************
- * WebRtcCng_InitEnc/Dec(...)
- *
- * This function initializes a instance
- *
- * Input:
- * - cng_inst : Instance that should be initialized
- *
- * - fs : 8000 for narrowband and 16000 for wideband
- * - interval : generate SID data every interval ms
- * - quality : Number of refl. coefs, maximum allowed is 12
- *
- * Output:
- * - cng_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
- int16_t quality);
-void WebRtcCng_InitDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_FreeEnc/Dec(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_Encode(...)
- *
- * These functions analyzes background noise
- *
- * Input:
- * - cng_inst : Pointer to created instance
- * - speech : Signal to be analyzed
- * - nrOfSamples : Size of speech vector
- * - forceSID : not zero to force SID frame and reset
- *
- * Output:
- * - bytesOut : Nr of bytes to transmit, might be 0
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
- size_t nrOfSamples, uint8_t* SIDdata,
- size_t* bytesOut, int16_t forceSID);
-
-/****************************************************************************
- * WebRtcCng_UpdateSid(...)
- *
- * These functions updates the CN state, when a new SID packet arrives
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - SID : SID packet, all headers removed
- * - length : Length in bytes of SID packet
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
- size_t length);
-
-/****************************************************************************
- * WebRtcCng_Generate(...)
- *
- * These functions generates CN data when needed
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - outData : pointer to area to write CN data
- * - nrOfSamples : How much data to generate
- * - new_period : >0 if a new period of CNG, will reset history
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
- size_t nrOfSamples, int16_t new_period);
-
-/*****************************************************************************
- * WebRtcCng_GetErrorCodeEnc/Dec(...)
- *
- * This functions can be used to check the error code of a CNG instance. When
- * a function returns -1 a error code will be set for that instance. The
- * function below extract the code of the last error that occurred in the
- * specified instance.
- *
- * Input:
- * - CNG_inst : CNG enc/dec instance
- *
- * Return value : Error code
- */
-int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst);
-#ifdef __cplusplus
-}
-#endif
+namespace webrtc {
+
+class ComfortNoiseDecoder {
+ public:
+ ComfortNoiseDecoder();
+ ~ComfortNoiseDecoder() = default;
+
+ ComfortNoiseDecoder(const ComfortNoiseDecoder&) = delete;
+ ComfortNoiseDecoder& operator=(const ComfortNoiseDecoder&) = delete;
+
+ void Reset();
+
+ // Updates the CN state when a new SID packet arrives.
+ // |sid| is a view of the SID packet without the headers.
+ void UpdateSid(rtc::ArrayView<const uint8_t> sid);
+
+ // Generates comfort noise.
+ // |out_data| will be filled with samples - its size determines the number of
+ // samples generated. When |new_period| is true, CNG history will be reset
+ // before any audio is generated. Returns |false| if outData is too large -
+ // currently 640 bytes (equalling 10ms at 64kHz).
+ // TODO(ossu): Specify better limits for the size of out_data. Either let it
+ // be unbounded or limit to 10ms in the current sample rate.
+ bool Generate(rtc::ArrayView<int16_t> out_data, bool new_period);
+
+ private:
+ uint32_t dec_seed_;
+ int32_t dec_target_energy_;
+ int32_t dec_used_energy_;
+ int16_t dec_target_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_used_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_filtstate_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_filtstateLow_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ uint16_t dec_order_;
+ int16_t dec_target_scale_factor_; /* Q29 */
+ int16_t dec_used_scale_factor_; /* Q29 */
+};
+
+class ComfortNoiseEncoder {
+ public:
+ // Creates a comfort noise encoder.
+ // |fs| selects sample rate: 8000 for narrowband or 16000 for wideband.
+ // |interval| sets the interval at which to generate SID data (in ms).
+ // |quality| selects the number of refl. coeffs. Maximum allowed is 12.
+ ComfortNoiseEncoder(int fs, int interval, int quality);
+ ~ComfortNoiseEncoder() = default;
+
+ ComfortNoiseEncoder(const ComfortNoiseEncoder&) = delete;
+ ComfortNoiseEncoder& operator=(const ComfortNoiseEncoder&) = delete;
+
+ // Resets the comfort noise encoder to its initial state.
+ // Parameters are set as during construction.
+ void Reset(int fs, int interval, int quality);
+
+ // Analyzes background noise from |speech| and appends coefficients to
+ // |output|. Returns the number of coefficients generated. If |force_sid| is
+ // true, a SID frame is forced and the internal sid interval counter is reset.
+ // Will fail if the input size is too large (> 640 samples, see
+ // ComfortNoiseDecoder::Generate).
+ size_t Encode(rtc::ArrayView<const int16_t> speech,
+ bool force_sid,
+ rtc::Buffer* output);
+
+ private:
+ size_t enc_nrOfCoefs_;
+ int enc_sampfreq_;
+ int16_t enc_interval_;
+ int16_t enc_msSinceSid_;
+ int32_t enc_Energy_;
+ int16_t enc_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int32_t enc_corrVector_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ uint32_t enc_seed_;
+};
+
+} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
index 9dc3a6fd7ad..7a627e757c9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_DECODER_PCM_H_
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
index a24b1526fd2..baa5d382d32 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
@@ -52,10 +52,6 @@ AudioEncoderPcm::AudioEncoderPcm(const Config& config, int sample_rate_hz)
AudioEncoderPcm::~AudioEncoderPcm() = default;
-size_t AudioEncoderPcm::MaxEncodedBytes() const {
- return full_frame_samples_ * BytesPerSample();
-}
-
int AudioEncoderPcm::SampleRateHz() const {
return sample_rate_hz_;
}
@@ -93,13 +89,14 @@ AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeImpl(
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
info.encoded_bytes =
- encoded->AppendData(MaxEncodedBytes(),
+ encoded->AppendData(full_frame_samples_ * BytesPerSample(),
[&] (rtc::ArrayView<uint8_t> encoded) {
return EncodeCall(&speech_buffer_[0],
full_frame_samples_,
encoded.data());
});
speech_buffer_.clear();
+ info.encoder_type = GetCodecType();
return info;
}
@@ -120,6 +117,10 @@ size_t AudioEncoderPcmA::BytesPerSample() const {
return 1;
}
+AudioEncoder::CodecType AudioEncoderPcmA::GetCodecType() const {
+ return AudioEncoder::CodecType::kPcmA;
+}
+
AudioEncoderPcmU::AudioEncoderPcmU(const CodecInst& codec_inst)
: AudioEncoderPcmU(CreateConfig<AudioEncoderPcmU>(codec_inst)) {}
@@ -133,4 +134,8 @@ size_t AudioEncoderPcmU::BytesPerSample() const {
return 1;
}
+AudioEncoder::CodecType AudioEncoderPcmU::GetCodecType() const {
+ return AudioEncoder::CodecType::kPcmU;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
index 6b3cebfb336..721344528f8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
@@ -35,7 +35,6 @@ class AudioEncoderPcm : public AudioEncoder {
~AudioEncoderPcm() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
@@ -56,6 +55,10 @@ class AudioEncoderPcm : public AudioEncoder {
virtual size_t BytesPerSample() const = 0;
+ // Used to set EncodedInfoLeaf::encoder_type in
+ // AudioEncoderPcm::EncodeImpl
+ virtual AudioEncoder::CodecType GetCodecType() const = 0;
+
private:
const int sample_rate_hz_;
const size_t num_channels_;
@@ -85,6 +88,8 @@ class AudioEncoderPcmA final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
static const int kSampleRateHz = 8000;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA);
@@ -107,6 +112,8 @@ class AudioEncoderPcmU final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
static const int kSampleRateHz = 8000;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
index 7cc2ea98773..1837ffabe29 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
typedef struct WebRtcG722DecInst G722DecInst;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
index 9256518445d..1f3936c8eee 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
@@ -60,10 +60,6 @@ AudioEncoderG722::AudioEncoderG722(const CodecInst& codec_inst)
AudioEncoderG722::~AudioEncoderG722() = default;
-size_t AudioEncoderG722::MaxEncodedBytes() const {
- return SamplesPerChannel() / 2 * num_channels_;
-}
-
int AudioEncoderG722::SampleRateHz() const {
return kSampleRateHz;
}
@@ -149,6 +145,7 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeImpl(
});
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
+ info.encoder_type = CodecType::kG722;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
index dec87b2b7a4..ad49a865e25 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
@@ -14,6 +14,7 @@
#include <memory>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
@@ -35,7 +36,6 @@ class AudioEncoderG722 final : public AudioEncoder {
explicit AudioEncoderG722(const CodecInst& codec_inst);
~AudioEncoderG722() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -44,7 +44,7 @@ class AudioEncoderG722 final : public AudioEncoder {
int GetTargetBitrate() const override;
void Reset() override;
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
index e890635da09..036c11fac47 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
index c7d7411c45d..ca11587dfab 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
@@ -56,10 +56,6 @@ AudioEncoderIlbc::~AudioEncoderIlbc() {
RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_));
}
-size_t AudioEncoderIlbc::MaxEncodedBytes() const {
- return RequiredOutputSizeBytes();
-}
-
int AudioEncoderIlbc::SampleRateHz() const {
return kSampleRateHz;
}
@@ -131,6 +127,7 @@ AudioEncoder::EncodedInfo AudioEncoderIlbc::EncodeImpl(
info.encoded_bytes = encoded_bytes;
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = config_.payload_type;
+ info.encoder_type = CodecType::kIlbc;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
index 27329bbc4ee..63639860f45 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
@@ -34,7 +34,6 @@ class AudioEncoderIlbc final : public AudioEncoder {
explicit AudioEncoderIlbc(const CodecInst& codec_inst);
~AudioEncoderIlbc() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
index 62a686495b1..a8375afb609 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
@@ -65,15 +65,15 @@ void WebRtcIlbcfix_GetLspPoly(
{
/* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */
high = (int16_t)(fPtr[-1] >> 16);
- low = (int16_t)((fPtr[-1] - ((int32_t)high << 16)) >> 1);
+ low = (int16_t)((fPtr[-1] & 0xffff) >> 1);
- tmpW32 = ((high * *lspPtr) << 2) + (((low * *lspPtr) >> 15) << 2);
+ tmpW32 = 4 * high * *lspPtr + 4 * ((low * *lspPtr) >> 15);
(*fPtr) += fPtr[-2];
(*fPtr) -= tmpW32;
fPtr--;
}
- *fPtr -= *lspPtr << 10;
+ *fPtr -= *lspPtr * (1 << 10);
fPtr+=i;
lspPtr+=2;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
index bd101bf30ca..8b18c047b93 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
@@ -48,7 +48,7 @@ void WebRtcIlbcfix_HpOutput(
tmpW32 = (tmpW32>>15);
tmpW32 += y[0] * ba[3]; /* (-a[1])*y[i-1] (high part) */
tmpW32 += y[2] * ba[4]; /* (-a[2])*y[i-2] (high part) */
- tmpW32 = (tmpW32<<1);
+ tmpW32 *= 2;
tmpW32 += signal[i] * ba[0]; /* b[0]*x[0] */
tmpW32 += x[0] * ba[1]; /* b[1]*x[i-1] */
@@ -77,11 +77,11 @@ void WebRtcIlbcfix_HpOutput(
} else if (tmpW32<-268435456) {
tmpW32 = WEBRTC_SPL_WORD32_MIN;
} else {
- tmpW32 <<= 3;
+ tmpW32 *= 8;
}
y[0] = (int16_t)(tmpW32 >> 16);
- y[1] = (int16_t)((tmpW32 - (y[0] << 16)) >> 1);
+ y[1] = (int16_t)((tmpW32 & 0xffff) >> 1);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
index d4f6a4a41e6..1aba106f909 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
@@ -15,6 +15,10 @@
'audio_decoder.cc',
'audio_decoder.h',
],
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_approved',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
},
{
@@ -24,6 +28,10 @@
'audio_encoder.cc',
'audio_encoder.h',
],
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_approved',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
},
],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
index d9d20ec0396..b1907bbb394 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
index 0da8ed71d66..f1f2714ff9c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
@@ -56,7 +57,6 @@ class AudioEncoderIsacT final : public AudioEncoder {
const rtc::scoped_refptr<LockedIsacBandwidthInfo>& bwinfo);
~AudioEncoderIsacT() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
index 1debbeb9038..b6a1747c391 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
@@ -80,11 +80,6 @@ AudioEncoderIsacT<T>::~AudioEncoderIsacT() {
}
template <typename T>
-size_t AudioEncoderIsacT<T>::MaxEncodedBytes() const {
- return kSufficientEncodeBufferSizeBytes;
-}
-
-template <typename T>
int AudioEncoderIsacT<T>::SampleRateHz() const {
return T::EncSampRate(isac_state_);
}
@@ -150,6 +145,7 @@ AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeImpl(
info.encoded_bytes = encoded_bytes;
info.encoded_timestamp = packet_timestamp_;
info.payload_type = config_.payload_type;
+ info.encoder_type = CodecType::kIsac;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
index fdbb2fcb0d7..001a04f39bf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
@@ -90,7 +90,7 @@ void WebRtcIsacfix_Spec2TimeC(int16_t* inreQ7,
int32_t* outre1Q16,
int32_t* outre2Q16);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9,
int16_t* inre2Q9,
int16_t* outre,
@@ -174,7 +174,7 @@ void WebRtcIsacfix_FilterMaLoopC(int16_t input0,
int32_t* ptr1,
int32_t* ptr2);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r,
const int16_t* __restrict x,
int16_t N,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
index 2c8c923cd33..1b87d0ea557 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
@@ -147,7 +147,7 @@ void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[],
const int matrix0_index_factor,
const int matrix0_index_step);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[],
const int32_t matrix1[],
int32_t matrix_product[],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
index 0e67e300ac1..d488339b31f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
@@ -60,7 +60,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16C(
int32_t *filter_state_ch1,
int32_t *filter_state_ch2);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_AllpassFilter2FixDec16Neon(
int16_t *data_ch1,
int16_t *data_ch2,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
index 0ec115414b8..4b03181e456 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
@@ -64,11 +64,7 @@ class FilterBanksTest : public testing::Test {
TEST_F(FilterBanksTest, AllpassFilter2FixDec16Test) {
CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16C);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
index 5cce1e9f0b2..3ed57788a1f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
@@ -59,11 +59,7 @@ class FiltersTest : public testing::Test {
TEST_F(FiltersTest, AutocorrFixTest) {
FiltersTester(WebRtcIsacfix_AutocorrC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- FiltersTester(WebRtcIsacfix_AutocorrNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
FiltersTester(WebRtcIsacfix_AutocorrNeon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
index aba3aa0c0bf..e7905ae81fa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
@@ -201,7 +201,7 @@ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
* This function initializes function pointers for ARM Neon platform.
*/
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
static void WebRtcIsacfix_InitNeon(void) {
WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrNeon;
WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopNeon;
@@ -253,11 +253,7 @@ static void InitFunctionPointers(void) {
WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1C;
WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2C;
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- WebRtcIsacfix_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcIsacfix_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
index 18377dd370f..0d881e80442 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
@@ -57,8 +57,6 @@ void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) {
ysum32 += in[PITCH_CORR_LEN2 + k - 1] * in[PITCH_CORR_LEN2 + k - 1] >>
scaling;
- // TODO(zhongwei.yao): Move this function into a separate NEON code file so
- // that WEBRTC_DETECT_NEON could take advantage of it.
#ifdef WEBRTC_HAS_NEON
{
int32_t vbuff[4];
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
index 58d890011fe..c5cc87ffce2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
@@ -179,22 +179,14 @@ class TransformTest : public testing::Test {
TEST_F(TransformTest, Time2SpecTest) {
Time2SpecTester(WebRtcIsacfix_Time2SpecC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- Time2SpecTester(WebRtcIsacfix_Time2SpecNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
Time2SpecTester(WebRtcIsacfix_Time2SpecNeon);
#endif
}
TEST_F(TransformTest, Spec2TimeTest) {
Spec2TimeTester(WebRtcIsacfix_Spec2TimeC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
index 32f36c52617..276eb60e280 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
@@ -25,10 +25,10 @@ class IsacSpeedTest : public AudioCodecSpeedTest {
IsacSpeedTest();
void SetUp() override;
void TearDown() override;
- virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
- size_t max_bytes, size_t* encoded_bytes);
- virtual float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
- int16_t* out_data);
+ float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ size_t max_bytes, size_t* encoded_bytes) override;
+ float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
+ int16_t* out_data) override;
ISACFIX_MainStruct *ISACFIX_main_inst_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
index 63e4928bd88..47bbe31b8ae 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
@@ -214,10 +214,10 @@ int WebRtcIsac_DecHistOneStepMulti(int *data, /* output: data vector */
if (streamdata->stream_index == 0) /* first time decoder is called for this stream */
{
/* read first word from bytestream */
- streamval = *stream_ptr << 24;
- streamval |= *++stream_ptr << 16;
- streamval |= *++stream_ptr << 8;
- streamval |= *++stream_ptr;
+ streamval = (uint32_t)(*stream_ptr) << 24;
+ streamval |= (uint32_t)(*++stream_ptr) << 16;
+ streamval |= (uint32_t)(*++stream_ptr) << 8;
+ streamval |= (uint32_t)(*++stream_ptr);
} else {
streamval = streamdata->streamval;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
index c1204ad03ad..f920dc2ef8b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
@@ -162,9 +162,9 @@ static void FindInvArSpec(const int16_t* ARCoefQ12,
}
for (k = 0; k < FRAMESAMPLES / 8; k++) {
- CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] -
- (diffQ16[k] << shftVal);
- CurveQ16[k] += diffQ16[k] << shftVal;
+ int32_t diff_q16_shifted = (int32_t)((uint32_t)(diffQ16[k]) << shftVal);
+ CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] - diff_q16_shifted;
+ CurveQ16[k] += diff_q16_shifted;
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h
new file mode 100644
index 00000000000..6e5737c89b8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+
+namespace webrtc {
+
+class MockAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ MOCK_METHOD0(GetSupportedFormats, std::vector<SdpAudioFormat>());
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) {
+ std::unique_ptr<AudioDecoder> return_value;
+ MakeAudioDecoderMock(format, &return_value);
+ return return_value;
+ }
+ MOCK_METHOD2(MakeAudioDecoderMock,
+ void(const SdpAudioFormat& format,
+ std::unique_ptr<AudioDecoder>* return_value));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
index 52849691ac6..a674eba6607 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
@@ -49,26 +49,4 @@ AudioEncoder::EncodedInfo MockAudioEncoder::CopyEncoding::operator()(
return info_;
}
-MockAudioEncoderDeprecated::CopyEncoding::CopyEncoding(
- AudioEncoder::EncodedInfo info,
- rtc::ArrayView<const uint8_t> payload)
- : info_(info), payload_(payload) { }
-
-MockAudioEncoderDeprecated::CopyEncoding::CopyEncoding(
- rtc::ArrayView<const uint8_t> payload)
- : payload_(payload) {
- info_.encoded_bytes = payload_.size();
-}
-
-AudioEncoder::EncodedInfo MockAudioEncoderDeprecated::CopyEncoding::operator()(
- uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_bytes_encoded,
- uint8_t* encoded) {
- RTC_CHECK(encoded);
- RTC_CHECK_LE(info_.encoded_bytes, payload_.size());
- std::memcpy(encoded, payload_.data(), info_.encoded_bytes);
- return info_;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
index 58a1e756f97..2ffb30b708a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_ENCODER_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_ENCODER_H_
+#include <string>
+
#include "webrtc/base/array_view.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
@@ -18,12 +20,15 @@
namespace webrtc {
-class MockAudioEncoderBase : public AudioEncoder {
+class MockAudioEncoder : public AudioEncoder {
public:
- ~MockAudioEncoderBase() override { Die(); }
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ ~MockAudioEncoder() /* override */ { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD1(Mark, void(std::string desc));
- MOCK_CONST_METHOD0(MaxEncodedBytes, size_t());
MOCK_CONST_METHOD0(SampleRateHz, int());
MOCK_CONST_METHOD0(NumChannels, size_t());
MOCK_CONST_METHOD0(RtpTimestampRateHz, int());
@@ -39,10 +44,7 @@ class MockAudioEncoderBase : public AudioEncoder {
MOCK_METHOD1(SetTargetBitrate, void(int target_bps));
MOCK_METHOD1(SetMaxBitrate, void(int max_bps));
MOCK_METHOD1(SetMaxPayloadSize, void(int max_payload_size_bytes));
-};
-class MockAudioEncoder final : public MockAudioEncoderBase {
- public:
// Note, we explicitly chose not to create a mock for the Encode method.
MOCK_METHOD3(EncodeImpl,
EncodedInfo(uint32_t timestamp,
@@ -53,11 +55,11 @@ class MockAudioEncoder final : public MockAudioEncoderBase {
public:
// Creates a functor that will return |info| and adjust the rtc::Buffer
// given as input to it, so it is info.encoded_bytes larger.
- FakeEncoding(const AudioEncoder::EncodedInfo& info);
+ explicit FakeEncoding(const AudioEncoder::EncodedInfo& info);
// Shorthand version of the constructor above, for when only setting
// encoded_bytes in the EncodedInfo object matters.
- FakeEncoding(size_t encoded_bytes);
+ explicit FakeEncoding(size_t encoded_bytes);
AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
rtc::ArrayView<const int16_t> audio,
@@ -80,41 +82,12 @@ class MockAudioEncoder final : public MockAudioEncoderBase {
// Shorthand version of the constructor above, for when you wish to append
// the whole payload and do not care about any EncodedInfo attribute other
// than encoded_bytes.
- CopyEncoding(rtc::ArrayView<const uint8_t> payload);
+ explicit CopyEncoding(rtc::ArrayView<const uint8_t> payload);
AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded);
- private:
- AudioEncoder::EncodedInfo info_;
- rtc::ArrayView<const uint8_t> payload_;
- };
-
-};
-
-class MockAudioEncoderDeprecated final : public MockAudioEncoderBase {
- public:
- // Note, we explicitly chose not to create a mock for the Encode method.
- MOCK_METHOD4(EncodeInternal,
- EncodedInfo(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded));
- // A functor like MockAudioEncoder::CopyEncoding above, but which has the
- // deprecated Encode signature. Currently only used in one test and should be
- // removed once that backwards compatibility is.
- class CopyEncoding {
- public:
- CopyEncoding(AudioEncoder::EncodedInfo info,
- rtc::ArrayView<const uint8_t> payload);
-
- CopyEncoding(rtc::ArrayView<const uint8_t> payload);
-
- AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_bytes_encoded,
- uint8_t* encoded);
private:
AudioEncoder::EncodedInfo info_;
rtc::ArrayView<const uint8_t> payload_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
index af32a84512e..be48ca988ef 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
index a599e291d47..a2497c7862a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
+#include <algorithm>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_types.h"
@@ -100,16 +102,6 @@ AudioEncoderOpus::~AudioEncoderOpus() {
RTC_CHECK_EQ(0, WebRtcOpus_EncoderFree(inst_));
}
-size_t AudioEncoderOpus::MaxEncodedBytes() const {
- // Calculate the number of bytes we expect the encoder to produce,
- // then multiply by two to give a wide margin for error.
- const size_t bytes_per_millisecond =
- static_cast<size_t>(config_.bitrate_bps / (1000 * 8) + 1);
- const size_t approx_encoded_bytes =
- Num10msFramesPerPacket() * 10 * bytes_per_millisecond;
- return 2 * approx_encoded_bytes;
-}
-
int AudioEncoderOpus::SampleRateHz() const {
return kSampleRateHz;
}
@@ -198,7 +190,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeImpl(
RTC_CHECK_EQ(input_buffer_.size(),
Num10msFramesPerPacket() * SamplesPer10msFrame());
- const size_t max_encoded_bytes = MaxEncodedBytes();
+ const size_t max_encoded_bytes = SufficientOutputBufferSize();
EncodedInfo info;
info.encoded_bytes =
encoded->AppendData(
@@ -220,6 +212,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeImpl(
info.payload_type = config_.payload_type;
info.send_even_if_empty = true; // Allows Opus to send empty packets.
info.speech = (info.encoded_bytes > 0);
+ info.encoder_type = CodecType::kOpus;
return info;
}
@@ -231,6 +224,16 @@ size_t AudioEncoderOpus::SamplesPer10msFrame() const {
return rtc::CheckedDivExact(kSampleRateHz, 100) * config_.num_channels;
}
+size_t AudioEncoderOpus::SufficientOutputBufferSize() const {
+ // Calculate the number of bytes we expect the encoder to produce,
+ // then multiply by two to give a wide margin for error.
+ const size_t bytes_per_millisecond =
+ static_cast<size_t>(config_.bitrate_bps / (1000 * 8) + 1);
+ const size_t approx_encoded_bytes =
+ Num10msFramesPerPacket() * 10 * bytes_per_millisecond;
+ return 2 * approx_encoded_bytes;
+}
+
// If the given config is OK, recreate the Opus encoder instance with those
// settings, save the config, and return true. Otherwise, do nothing and return
// false.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
index 3f11af1f9e0..8900659f48e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
@@ -54,7 +54,6 @@ class AudioEncoderOpus final : public AudioEncoder {
explicit AudioEncoderOpus(const CodecInst& codec_inst);
~AudioEncoderOpus() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
@@ -79,7 +78,7 @@ class AudioEncoderOpus final : public AudioEncoder {
ApplicationMode application() const { return config_.application; }
bool dtx_enabled() const { return config_.dtx_enabled; }
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
@@ -87,6 +86,7 @@ protected:
private:
size_t Num10msFramesPerPacket() const;
size_t SamplesPer10msFrame() const;
+ size_t SufficientOutputBufferSize() const;
bool RecreateEncoderInstance(const Config& config);
Config config_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
index 4d1aa42c89f..7165d29c8b4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
@@ -23,10 +23,10 @@ class OpusSpeedTest : public AudioCodecSpeedTest {
OpusSpeedTest();
void SetUp() override;
void TearDown() override;
- virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
- size_t max_bytes, size_t* encoded_bytes);
- virtual float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
- int16_t* out_data);
+ float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ size_t max_bytes, size_t* encoded_bytes) override;
+ float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
+ int16_t* out_data) override;
WebRtcOpusEncInst* opus_encoder_;
WebRtcOpusDecInst* opus_decoder_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
index f4d40223024..cafd3e851bd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
@@ -26,6 +26,10 @@ size_t AudioEncoderPcm16B::BytesPerSample() const {
return 2;
}
+AudioEncoder::CodecType AudioEncoderPcm16B::GetCodecType() const {
+ return CodecType::kOther;
+}
+
namespace {
AudioEncoderPcm16B::Config CreateConfig(const CodecInst& codec_inst) {
AudioEncoderPcm16B::Config config;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
index 34a780b49de..bdc27a67e30 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
namespace webrtc {
@@ -38,6 +39,8 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index 4275f54103a..37fa55a4da1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -19,12 +19,7 @@
namespace webrtc {
AudioEncoderCopyRed::Config::Config() = default;
-
-// TODO(kwiberg): =default this when Visual Studio learns to handle it.
-AudioEncoderCopyRed::Config::Config(Config&& c)
- : payload_type(c.payload_type),
- speech_encoder(std::move(c.speech_encoder)) {}
-
+AudioEncoderCopyRed::Config::Config(Config&&) = default;
AudioEncoderCopyRed::Config::~Config() = default;
AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config)
@@ -35,10 +30,6 @@ AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config)
AudioEncoderCopyRed::~AudioEncoderCopyRed() = default;
-size_t AudioEncoderCopyRed::MaxEncodedBytes() const {
- return 2 * speech_encoder_->MaxEncodedBytes();
-}
-
int AudioEncoderCopyRed::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
@@ -132,4 +123,9 @@ void AudioEncoderCopyRed::SetTargetBitrate(int bits_per_second) {
speech_encoder_->SetTargetBitrate(bits_per_second);
}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoderCopyRed::ReclaimContainedEncoders() {
+ return rtc::ArrayView<std::unique_ptr<AudioEncoder>>(&speech_encoder_, 1);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
index a67ae486bb2..a08118364cc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
namespace webrtc {
@@ -37,7 +38,6 @@ class AudioEncoderCopyRed final : public AudioEncoder {
~AudioEncoderCopyRed() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -51,8 +51,10 @@ class AudioEncoderCopyRed final : public AudioEncoder {
void SetMaxPlaybackRate(int frequency_hz) override;
void SetProjectedPacketLossRate(double fraction) override;
void SetTargetBitrate(int target_bps) override;
+ rtc::ArrayView<std::unique_ptr<AudioEncoder>> ReclaimContainedEncoders()
+ override;
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index c73cb9f2096..22b2ceb5f79 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -26,7 +26,6 @@ using ::testing::MockFunction;
namespace webrtc {
namespace {
-static const size_t kMockMaxEncodedBytes = 1000;
static const size_t kMaxNumSamples = 48 * 10 * 2; // 10 ms @ 48 kHz stereo.
}
@@ -46,8 +45,6 @@ class AudioEncoderCopyRedTest : public ::testing::Test {
EXPECT_CALL(*mock_encoder_, NumChannels()).WillRepeatedly(Return(1U));
EXPECT_CALL(*mock_encoder_, SampleRateHz())
.WillRepeatedly(Return(sample_rate_hz_));
- EXPECT_CALL(*mock_encoder_, MaxEncodedBytes())
- .WillRepeatedly(Return(kMockMaxEncodedBytes));
}
void TearDown() override {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h b/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
index 381e35e639b..daf9ac8ae9a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
#define WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
+#include <memory>
#include <string>
#include <vector>
@@ -686,13 +687,24 @@ class AudioCodingModule {
// and other relevant parameters, c.f.
// module_common_types.h for the definition of
// AudioFrame.
+ // -muted : if true, the sample data in audio_frame is not
+ // populated, and must be interpreted as all zero.
//
// Return value:
// -1 if the function fails,
// 0 if the function succeeds.
//
virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
- AudioFrame* audio_frame) = 0;
+ AudioFrame* audio_frame,
+ bool* muted) = 0;
+
+ /////////////////////////////////////////////////////////////////////////////
+ // Same as above, but without the muted parameter. This methods should not be
+ // used if enable_fast_accelerate was set to true in NetEq::Config.
+ // TODO(henrik.lundin) Remove this method when downstream dependencies are
+ // ready.
+ virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
+ AudioFrame* audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
// Codec specific
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
index d800cc7dbe9..762c3859837 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
@@ -13,7 +13,6 @@
#include <assert.h>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
#ifdef WEBRTC_CODEC_G722
#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
@@ -36,43 +35,6 @@
namespace webrtc {
-AudioDecoderCng::AudioDecoderCng() {
- RTC_CHECK_EQ(0, WebRtcCng_CreateDec(&dec_state_));
- WebRtcCng_InitDec(dec_state_);
-}
-
-AudioDecoderCng::~AudioDecoderCng() {
- WebRtcCng_FreeDec(dec_state_);
-}
-
-void AudioDecoderCng::Reset() {
- WebRtcCng_InitDec(dec_state_);
-}
-
-int AudioDecoderCng::IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return -1;
-}
-
-CNG_dec_inst* AudioDecoderCng::CngDecoderInstance() {
- return dec_state_;
-}
-
-size_t AudioDecoderCng::Channels() const {
- return 1;
-}
-
-int AudioDecoderCng::DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) {
- return -1;
-}
-
bool CodecSupported(NetEqDecoder codec_type) {
switch (codec_type) {
case NetEqDecoder::kDecoderPCMu:
@@ -175,67 +137,4 @@ int CodecSampleRateHz(NetEqDecoder codec_type) {
}
}
-AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type) {
- if (!CodecSupported(codec_type)) {
- return NULL;
- }
- switch (codec_type) {
- case NetEqDecoder::kDecoderPCMu:
- return new AudioDecoderPcmU(1);
- case NetEqDecoder::kDecoderPCMa:
- return new AudioDecoderPcmA(1);
- case NetEqDecoder::kDecoderPCMu_2ch:
- return new AudioDecoderPcmU(2);
- case NetEqDecoder::kDecoderPCMa_2ch:
- return new AudioDecoderPcmA(2);
-#ifdef WEBRTC_CODEC_ILBC
- case NetEqDecoder::kDecoderILBC:
- return new AudioDecoderIlbc;
-#endif
-#if defined(WEBRTC_CODEC_ISACFX)
- case NetEqDecoder::kDecoderISAC:
- return new AudioDecoderIsacFix();
-#elif defined(WEBRTC_CODEC_ISAC)
- case NetEqDecoder::kDecoderISAC:
- case NetEqDecoder::kDecoderISACswb:
- return new AudioDecoderIsac();
-#endif
- case NetEqDecoder::kDecoderPCM16B:
- case NetEqDecoder::kDecoderPCM16Bwb:
- case NetEqDecoder::kDecoderPCM16Bswb32kHz:
- case NetEqDecoder::kDecoderPCM16Bswb48kHz:
- return new AudioDecoderPcm16B(1);
- case NetEqDecoder::kDecoderPCM16B_2ch:
- case NetEqDecoder::kDecoderPCM16Bwb_2ch:
- case NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch:
- case NetEqDecoder::kDecoderPCM16Bswb48kHz_2ch:
- return new AudioDecoderPcm16B(2);
- case NetEqDecoder::kDecoderPCM16B_5ch:
- return new AudioDecoderPcm16B(5);
-#ifdef WEBRTC_CODEC_G722
- case NetEqDecoder::kDecoderG722:
- return new AudioDecoderG722;
- case NetEqDecoder::kDecoderG722_2ch:
- return new AudioDecoderG722Stereo;
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- case NetEqDecoder::kDecoderOpus:
- return new AudioDecoderOpus(1);
- case NetEqDecoder::kDecoderOpus_2ch:
- return new AudioDecoderOpus(2);
-#endif
- case NetEqDecoder::kDecoderCNGnb:
- case NetEqDecoder::kDecoderCNGwb:
- case NetEqDecoder::kDecoderCNGswb32kHz:
- case NetEqDecoder::kDecoderCNGswb48kHz:
- return new AudioDecoderCng;
- case NetEqDecoder::kDecoderRED:
- case NetEqDecoder::kDecoderAVT:
- case NetEqDecoder::kDecoderArbitrary:
- default: {
- return NULL;
- }
- }
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
index bc8bdd9626d..579ccb36f7a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
@@ -16,7 +16,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#ifdef WEBRTC_CODEC_G722
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#endif
@@ -25,38 +24,6 @@
namespace webrtc {
-// AudioDecoderCng is a special type of AudioDecoder. It inherits from
-// AudioDecoder just to fit in the DecoderDatabase. None of the class methods
-// should be used, except constructor, destructor, and accessors.
-// TODO(hlundin): Consider the possibility to create a super-class to
-// AudioDecoder that is stored in DecoderDatabase. Then AudioDecoder and a
-// specific CngDecoder class could both inherit from that class.
-class AudioDecoderCng : public AudioDecoder {
- public:
- explicit AudioDecoderCng();
- ~AudioDecoderCng() override;
- void Reset() override;
- int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) override;
-
- CNG_dec_inst* CngDecoderInstance() override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- CNG_dec_inst* dec_state_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderCng);
-};
-
using NetEqDecoder = acm2::RentACodec::NetEqDecoder;
// Returns true if |codec_type| is supported.
@@ -65,10 +32,5 @@ bool CodecSupported(NetEqDecoder codec_type);
// Returns the sample rate for |codec_type|.
int CodecSampleRateHz(NetEqDecoder codec_type);
-// Creates an AudioDecoder object of type |codec_type|. Returns NULL for for
-// unsupported codecs, and when creating an AudioDecoder is not applicable
-// (e.g., for RED and DTMF/AVT types).
-AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type);
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
index bd38c43903a..c80909d7b58 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
@@ -106,7 +106,7 @@ void AudioMultiVector::PushBackFromIndex(const AudioMultiVector& append_this,
assert(num_channels_ == append_this.num_channels_);
if (num_channels_ == append_this.num_channels_) {
for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PushBack(&append_this[i][index], length);
+ channels_[i]->PushBack(append_this[i], length, index);
}
}
}
@@ -133,14 +133,14 @@ size_t AudioMultiVector::ReadInterleavedFromIndex(size_t start_index,
int16_t* destination) const {
RTC_DCHECK(destination);
size_t index = 0; // Number of elements written to |destination| so far.
- assert(start_index <= Size());
+ RTC_DCHECK_LE(start_index, Size());
start_index = std::min(start_index, Size());
if (length + start_index > Size()) {
length = Size() - start_index;
}
if (num_channels_ == 1) {
// Special case to avoid the nested for loop below.
- memcpy(destination, &(*this)[0][start_index], length * sizeof(int16_t));
+ (*this)[0].CopyTo(length, start_index, destination);
return length;
}
for (size_t i = 0; i < length; ++i) {
@@ -167,7 +167,7 @@ void AudioMultiVector::OverwriteAt(const AudioMultiVector& insert_this,
length = std::min(length, insert_this.Size());
if (num_channels_ == insert_this.num_channels_) {
for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->OverwriteAt(&insert_this[i][0], length, position);
+ channels_[i]->OverwriteAt(insert_this[i], length, position);
}
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
index 013e1d89ad9..ea737a55424 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
@@ -15,124 +15,236 @@
#include <algorithm>
#include <memory>
+#include "webrtc/base/checks.h"
#include "webrtc/typedefs.h"
namespace webrtc {
AudioVector::AudioVector()
- : array_(new int16_t[kDefaultInitialSize]),
- first_free_ix_(0),
- capacity_(kDefaultInitialSize) {
+ : AudioVector(kDefaultInitialSize) {
+ Clear();
}
AudioVector::AudioVector(size_t initial_size)
- : array_(new int16_t[initial_size]),
- first_free_ix_(initial_size),
- capacity_(initial_size) {
- memset(array_.get(), 0, initial_size * sizeof(int16_t));
+ : array_(new int16_t[initial_size + 1]),
+ capacity_(initial_size + 1),
+ begin_index_(0),
+ end_index_(capacity_ - 1) {
+ memset(array_.get(), 0, capacity_ * sizeof(int16_t));
}
AudioVector::~AudioVector() = default;
void AudioVector::Clear() {
- first_free_ix_ = 0;
+ end_index_ = begin_index_ = 0;
}
void AudioVector::CopyTo(AudioVector* copy_to) const {
- if (copy_to) {
- copy_to->Reserve(Size());
- assert(copy_to->capacity_ >= Size());
- memcpy(copy_to->array_.get(), array_.get(), Size() * sizeof(int16_t));
- copy_to->first_free_ix_ = first_free_ix_;
+ RTC_DCHECK(copy_to);
+ copy_to->Reserve(Size());
+ CopyTo(Size(), 0, copy_to->array_.get());
+ copy_to->begin_index_ = 0;
+ copy_to->end_index_ = Size();
+}
+
+void AudioVector::CopyTo(
+ size_t length, size_t position, int16_t* copy_to) const {
+ if (length == 0)
+ return;
+ length = std::min(length, Size() - position);
+ const size_t copy_index = (begin_index_ + position) % capacity_;
+ const size_t first_chunk_length =
+ std::min(length, capacity_ - copy_index);
+ memcpy(copy_to, &array_[copy_index],
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(&copy_to[first_chunk_length], array_.get(),
+ remaining_length * sizeof(int16_t));
}
}
void AudioVector::PushFront(const AudioVector& prepend_this) {
- size_t insert_length = prepend_this.Size();
- Reserve(Size() + insert_length);
- memmove(&array_[insert_length], &array_[0], Size() * sizeof(int16_t));
- memcpy(&array_[0], &prepend_this.array_[0], insert_length * sizeof(int16_t));
- first_free_ix_ += insert_length;
+ const size_t length = prepend_this.Size();
+ if (length == 0)
+ return;
+
+ // Although the subsequent calling to PushFront does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ Reserve(Size() + length);
+
+ const size_t first_chunk_length =
+ std::min(length, prepend_this.capacity_ - prepend_this.begin_index_);
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0)
+ PushFront(prepend_this.array_.get(), remaining_length);
+ PushFront(&prepend_this.array_[prepend_this.begin_index_],
+ first_chunk_length);
}
void AudioVector::PushFront(const int16_t* prepend_this, size_t length) {
- // Same operation as InsertAt beginning.
- InsertAt(prepend_this, length, 0);
+ if (length == 0)
+ return;
+ Reserve(Size() + length);
+ const size_t first_chunk_length = std::min(length, begin_index_);
+ memcpy(&array_[begin_index_ - first_chunk_length],
+ &prepend_this[length - first_chunk_length],
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(&array_[capacity_ - remaining_length], prepend_this,
+ remaining_length * sizeof(int16_t));
+ }
+ begin_index_ = (begin_index_ + capacity_ - length) % capacity_;
}
void AudioVector::PushBack(const AudioVector& append_this) {
- PushBack(append_this.array_.get(), append_this.Size());
+ PushBack(append_this, append_this.Size(), 0);
+}
+
+void AudioVector::PushBack(
+ const AudioVector& append_this, size_t length, size_t position) {
+ RTC_DCHECK_LE(position, append_this.Size());
+ RTC_DCHECK_LE(length, append_this.Size() - position);
+
+ if (length == 0)
+ return;
+
+ // Although the subsequent calling to PushBack does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ Reserve(Size() + length);
+
+ const size_t start_index =
+ (append_this.begin_index_ + position) % append_this.capacity_;
+ const size_t first_chunk_length = std::min(
+ length, append_this.capacity_ - start_index);
+ PushBack(&append_this.array_[start_index], first_chunk_length);
+
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0)
+ PushBack(append_this.array_.get(), remaining_length);
}
void AudioVector::PushBack(const int16_t* append_this, size_t length) {
+ if (length == 0)
+ return;
Reserve(Size() + length);
- memcpy(&array_[first_free_ix_], append_this, length * sizeof(int16_t));
- first_free_ix_ += length;
+ const size_t first_chunk_length = std::min(length, capacity_ - end_index_);
+ memcpy(&array_[end_index_], append_this,
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(array_.get(), &append_this[first_chunk_length],
+ remaining_length * sizeof(int16_t));
+ }
+ end_index_ = (end_index_ + length) % capacity_;
}
void AudioVector::PopFront(size_t length) {
- if (length >= Size()) {
- // Remove all elements.
- Clear();
- } else {
- size_t remaining_samples = Size() - length;
- memmove(&array_[0], &array_[length], remaining_samples * sizeof(int16_t));
- first_free_ix_ -= length;
- }
+ if (length == 0)
+ return;
+ length = std::min(length, Size());
+ begin_index_ = (begin_index_ + length) % capacity_;
}
void AudioVector::PopBack(size_t length) {
+ if (length == 0)
+ return;
// Never remove more than what is in the array.
length = std::min(length, Size());
- first_free_ix_ -= length;
+ end_index_ = (end_index_ + capacity_ - length) % capacity_;
}
void AudioVector::Extend(size_t extra_length) {
- Reserve(Size() + extra_length);
- memset(&array_[first_free_ix_], 0, extra_length * sizeof(int16_t));
- first_free_ix_ += extra_length;
+ if (extra_length == 0)
+ return;
+ InsertZerosByPushBack(extra_length, Size());
}
void AudioVector::InsertAt(const int16_t* insert_this,
size_t length,
size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
+ if (length == 0)
+ return;
+ // Cap the insert position at the current array length.
position = std::min(Size(), position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memcpy(insert_position_ptr, insert_this, length * sizeof(int16_t));
- first_free_ix_ += length;
+
+ // When inserting to a position closer to the beginning, it is more efficient
+ // to insert by pushing front than to insert by pushing back, since less data
+ // will be moved, vice versa.
+ if (position <= Size() - position) {
+ InsertByPushFront(insert_this, length, position);
+ } else {
+ InsertByPushBack(insert_this, length, position);
+ }
}
void AudioVector::InsertZerosAt(size_t length,
size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
- position = std::min(capacity_, position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memset(insert_position_ptr, 0, length * sizeof(int16_t));
- first_free_ix_ += length;
+ if (length == 0)
+ return;
+ // Cap the insert position at the current array length.
+ position = std::min(Size(), position);
+
+ // When inserting to a position closer to the beginning, it is more efficient
+ // to insert by pushing front than to insert by pushing back, since less data
+ // will be moved, vice versa.
+ if (position <= Size() - position) {
+ InsertZerosByPushFront(length, position);
+ } else {
+ InsertZerosByPushBack(length, position);
+ }
+}
+
+void AudioVector::OverwriteAt(const AudioVector& insert_this,
+ size_t length,
+ size_t position) {
+ RTC_DCHECK_LE(length, insert_this.Size());
+ if (length == 0)
+ return;
+
+ // Cap the insert position at the current array length.
+ position = std::min(Size(), position);
+
+ // Although the subsequent calling to OverwriteAt does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ size_t new_size = std::max(Size(), position + length);
+ Reserve(new_size);
+
+ const size_t first_chunk_length =
+ std::min(length, insert_this.capacity_ - insert_this.begin_index_);
+ OverwriteAt(&insert_this.array_[insert_this.begin_index_], first_chunk_length,
+ position);
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ OverwriteAt(insert_this.array_.get(), remaining_length,
+ position + first_chunk_length);
+ }
}
void AudioVector::OverwriteAt(const int16_t* insert_this,
size_t length,
size_t position) {
+ if (length == 0)
+ return;
// Cap the insert position at the current array length.
position = std::min(Size(), position);
- Reserve(position + length);
- memcpy(&array_[position], insert_this, length * sizeof(int16_t));
- if (position + length > Size()) {
- // Array was expanded.
- first_free_ix_ += position + length - Size();
+
+ size_t new_size = std::max(Size(), position + length);
+ Reserve(new_size);
+
+ const size_t overwrite_index = (begin_index_ + position) % capacity_;
+ const size_t first_chunk_length =
+ std::min(length, capacity_ - overwrite_index);
+ memcpy(&array_[overwrite_index], insert_this,
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(array_.get(), &insert_this[first_chunk_length],
+ remaining_length * sizeof(int16_t));
}
+
+ end_index_ = (begin_index_ + new_size) % capacity_;
}
void AudioVector::CrossFade(const AudioVector& append_this,
@@ -142,7 +254,7 @@ void AudioVector::CrossFade(const AudioVector& append_this,
assert(fade_length <= append_this.Size());
fade_length = std::min(fade_length, Size());
fade_length = std::min(fade_length, append_this.Size());
- size_t position = Size() - fade_length;
+ size_t position = Size() - fade_length + begin_index_;
// Cross fade the overlapping regions.
// |alpha| is the mixing factor in Q14.
// TODO(hlundin): Consider skipping +1 in the denominator to produce a
@@ -151,41 +263,132 @@ void AudioVector::CrossFade(const AudioVector& append_this,
int alpha = 16384;
for (size_t i = 0; i < fade_length; ++i) {
alpha -= alpha_step;
- array_[position + i] = (alpha * array_[position + i] +
- (16384 - alpha) * append_this[i] + 8192) >> 14;
+ array_[(position + i) % capacity_] =
+ (alpha * array_[(position + i) % capacity_] +
+ (16384 - alpha) * append_this[i] + 8192) >> 14;
}
assert(alpha >= 0); // Verify that the slope was correct.
// Append what is left of |append_this|.
size_t samples_to_push_back = append_this.Size() - fade_length;
if (samples_to_push_back > 0)
- PushBack(&append_this[fade_length], samples_to_push_back);
+ PushBack(append_this, samples_to_push_back, fade_length);
}
// Returns the number of elements in this AudioVector.
size_t AudioVector::Size() const {
- return first_free_ix_;
+ return (end_index_ + capacity_ - begin_index_) % capacity_;
}
// Returns true if this AudioVector is empty.
bool AudioVector::Empty() const {
- return first_free_ix_ == 0;
+ return begin_index_ == end_index_;
}
const int16_t& AudioVector::operator[](size_t index) const {
- return array_[index];
+ return array_[(begin_index_ + index) % capacity_];
}
int16_t& AudioVector::operator[](size_t index) {
- return array_[index];
+ return array_[(begin_index_ + index) % capacity_];
}
void AudioVector::Reserve(size_t n) {
- if (capacity_ < n) {
- std::unique_ptr<int16_t[]> temp_array(new int16_t[n]);
- memcpy(temp_array.get(), array_.get(), Size() * sizeof(int16_t));
- array_.swap(temp_array);
- capacity_ = n;
+ if (capacity_ > n)
+ return;
+ const size_t length = Size();
+ // Reserve one more sample to remove the ambiguity between empty vector and
+ // full vector. Therefore |begin_index_| == |end_index_| indicates empty
+ // vector, and |begin_index_| == (|end_index_| + 1) % capacity indicates
+ // full vector.
+ std::unique_ptr<int16_t[]> temp_array(new int16_t[n + 1]);
+ CopyTo(length, 0, temp_array.get());
+ array_.swap(temp_array);
+ begin_index_ = 0;
+ end_index_ = length;
+ capacity_ = n + 1;
+}
+
+void AudioVector::InsertByPushBack(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ const size_t move_chunk_length = Size() - position;
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (move_chunk_length > 0) {
+ // TODO(minyue): see if it is possible to avoid copying to a buffer.
+ temp_array.reset(new int16_t[move_chunk_length]);
+ CopyTo(move_chunk_length, position, temp_array.get());
+ PopBack(move_chunk_length);
+ }
+
+ Reserve(Size() + length + move_chunk_length);
+ PushBack(insert_this, length);
+ if (move_chunk_length > 0)
+ PushBack(temp_array.get(), move_chunk_length);
+}
+
+void AudioVector::InsertByPushFront(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (position > 0) {
+ // TODO(minyue): see if it is possible to avoid copying to a buffer.
+ temp_array.reset(new int16_t[position]);
+ CopyTo(position, 0, temp_array.get());
+ PopFront(position);
+ }
+
+ Reserve(Size() + length + position);
+ PushFront(insert_this, length);
+ if (position > 0)
+ PushFront(temp_array.get(), position);
+}
+
+void AudioVector::InsertZerosByPushBack(size_t length,
+ size_t position) {
+ const size_t move_chunk_length = Size() - position;
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (move_chunk_length > 0) {
+ temp_array.reset(new int16_t[move_chunk_length]);
+ CopyTo(move_chunk_length, position, temp_array.get());
+ PopBack(move_chunk_length);
}
+
+ Reserve(Size() + length + move_chunk_length);
+
+ const size_t first_zero_chunk_length =
+ std::min(length, capacity_ - end_index_);
+ memset(&array_[end_index_], 0, first_zero_chunk_length * sizeof(int16_t));
+ const size_t remaining_zero_length = length - first_zero_chunk_length;
+ if (remaining_zero_length > 0)
+ memset(array_.get(), 0, remaining_zero_length * sizeof(int16_t));
+ end_index_ = (end_index_ + length) % capacity_;
+
+ if (move_chunk_length > 0)
+ PushBack(temp_array.get(), move_chunk_length);
+}
+
+void AudioVector::InsertZerosByPushFront(size_t length,
+ size_t position) {
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (position > 0) {
+ temp_array.reset(new int16_t[position]);
+ CopyTo(position, 0, temp_array.get());
+ PopFront(position);
+ }
+
+ Reserve(Size() + length + position);
+
+ const size_t first_zero_chunk_length = std::min(length, begin_index_);
+ memset(&array_[begin_index_ - first_zero_chunk_length], 0,
+ first_zero_chunk_length * sizeof(int16_t));
+ const size_t remaining_zero_length = length - first_zero_chunk_length;
+ if (remaining_zero_length > 0)
+ memset(&array_[capacity_ - remaining_zero_length], 0,
+ remaining_zero_length * sizeof(int16_t));
+ begin_index_ = (begin_index_ + capacity_ - length) % capacity_;
+
+ if (position > 0)
+ PushFront(temp_array.get(), position);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
index 15297f9bc8c..756292aa783 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
@@ -37,6 +37,9 @@ class AudioVector {
// |copy_to| will be an exact replica of this object.
virtual void CopyTo(AudioVector* copy_to) const;
+ // Copies |length| values from |position| in this vector to |copy_to|.
+ virtual void CopyTo(size_t length, size_t position, int16_t* copy_to) const;
+
// Prepends the contents of AudioVector |prepend_this| to this object. The
// length of this object is increased with the length of |prepend_this|.
virtual void PushFront(const AudioVector& prepend_this);
@@ -48,6 +51,12 @@ class AudioVector {
// Same as PushFront but will append to the end of this object.
virtual void PushBack(const AudioVector& append_this);
+ // Appends a segment of |append_this| to the end of this object. The segment
+ // starts from |position| and has |length| samples.
+ virtual void PushBack(const AudioVector& append_this,
+ size_t length,
+ size_t position);
+
// Same as PushFront but will append to the end of this object.
virtual void PushBack(const int16_t* append_this, size_t length);
@@ -71,6 +80,15 @@ class AudioVector {
// Like InsertAt, but inserts |length| zero elements at |position|.
virtual void InsertZerosAt(size_t length, size_t position);
+ // Overwrites |length| elements of this AudioVector starting from |position|
+ // with first values in |AudioVector|. The definition of |position|
+ // is the same as for InsertAt(). If |length| and |position| are selected
+ // such that the new data extends beyond the end of the current AudioVector,
+ // the vector is extended to accommodate the new data.
+ virtual void OverwriteAt(const AudioVector& insert_this,
+ size_t length,
+ size_t position);
+
// Overwrites |length| elements of this AudioVector with values taken from the
// array |insert_this|, starting at |position|. The definition of |position|
// is the same as for InsertAt(). If |length| and |position| are selected
@@ -100,11 +118,27 @@ class AudioVector {
void Reserve(size_t n);
+ void InsertByPushBack(const int16_t* insert_this, size_t length,
+ size_t position);
+
+ void InsertByPushFront(const int16_t* insert_this, size_t length,
+ size_t position);
+
+ void InsertZerosByPushBack(size_t length, size_t position);
+
+ void InsertZerosByPushFront(size_t length, size_t position);
+
std::unique_ptr<int16_t[]> array_;
- size_t first_free_ix_; // The first index after the last sample in array_.
- // Note that this index may point outside of array_.
+
size_t capacity_; // Allocated number of samples in the array.
+ // The index of the first sample in |array_|, except when
+ // |begin_index_ == end_index_|, which indicates an empty buffer.
+ size_t begin_index_;
+
+ // The index of the sample after the last sample in |array_|.
+ size_t end_index_;
+
RTC_DISALLOW_COPY_AND_ASSIGN(AudioVector);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
index 08009863455..cee7e586695 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
@@ -82,14 +82,6 @@ TEST_F(AudioVectorTest, PushBackAndCopy) {
EXPECT_TRUE(vec_copy.Empty());
}
-// Try to copy to a NULL pointer. Nothing should happen.
-TEST_F(AudioVectorTest, CopyToNull) {
- AudioVector vec;
- AudioVector* vec_copy = NULL;
- vec.PushBack(array_, array_length());
- vec.CopyTo(vec_copy);
-}
-
// Test the PushBack method with another AudioVector as input argument.
TEST_F(AudioVectorTest, PushBackVector) {
static const size_t kLength = 10;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
index 7e7a6325e97..9cfd6cb40ed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
@@ -17,6 +17,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
namespace webrtc {
@@ -58,10 +59,7 @@ void BackgroundNoise::Update(const AudioMultiVector& input,
ChannelParameters& parameters = channel_parameters_[channel_ix];
int16_t temp_signal_array[kVecLen + kMaxLpcOrder] = {0};
int16_t* temp_signal = &temp_signal_array[kMaxLpcOrder];
- memcpy(temp_signal,
- &input[channel_ix][input.Size() - kVecLen],
- sizeof(int16_t) * kVecLen);
-
+ input[channel_ix].CopyTo(kVecLen, input.Size() - kVecLen, temp_signal);
int32_t sample_energy = CalculateAutoCorrelation(temp_signal, kVecLen,
auto_correlation);
@@ -169,15 +167,10 @@ int16_t BackgroundNoise::ScaleShift(size_t channel) const {
int32_t BackgroundNoise::CalculateAutoCorrelation(
const int16_t* signal, size_t length, int32_t* auto_correlation) const {
- int16_t signal_max = WebRtcSpl_MaxAbsValueW16(signal, length);
- int correlation_scale = kLogVecLen -
- WebRtcSpl_NormW32(signal_max * signal_max);
- correlation_scale = std::max(0, correlation_scale);
-
static const int kCorrelationStep = -1;
- WebRtcSpl_CrossCorrelation(auto_correlation, signal, signal, length,
- kMaxLpcOrder + 1, correlation_scale,
- kCorrelationStep);
+ const int correlation_scale =
+ CrossCorrelationWithAutoShift(signal, signal, length, kMaxLpcOrder + 1,
+ kCorrelationStep, auto_correlation);
// Number of shifts to normalize energy to energy/sample.
int energy_sample_shift = kLogVecLen - correlation_scale;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
index a5b08469bea..90b02daf712 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
@@ -14,7 +14,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
@@ -23,31 +22,23 @@ namespace webrtc {
void ComfortNoise::Reset() {
first_call_ = true;
- internal_error_code_ = 0;
}
int ComfortNoise::UpdateParameters(Packet* packet) {
assert(packet); // Existence is verified by caller.
// Get comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetDecoder(
- packet->header.payloadType);
- if (!cng_decoder) {
+ if (decoder_database_->SetActiveCngDecoder(packet->header.payloadType)
+ != kOK) {
delete [] packet->payload;
delete packet;
return kUnknownPayloadType;
}
- decoder_database_->SetActiveCngDecoder(packet->header.payloadType);
- CNG_dec_inst* cng_inst = cng_decoder->CngDecoderInstance();
- int16_t ret = WebRtcCng_UpdateSid(cng_inst,
- packet->payload,
- packet->payload_length);
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ RTC_DCHECK(cng_decoder);
+ cng_decoder->UpdateSid(rtc::ArrayView<const uint8_t>(
+ packet->payload, packet->payload_length));
delete [] packet->payload;
delete packet;
- if (ret < 0) {
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- LOG(LS_ERROR) << "WebRtcCng_UpdateSid produced " << internal_error_code_;
- return kInternalError;
- }
return kOK;
}
@@ -63,30 +54,31 @@ int ComfortNoise::Generate(size_t requested_length,
}
size_t number_of_samples = requested_length;
- int16_t new_period = 0;
+ bool new_period = false;
if (first_call_) {
// Generate noise and overlap slightly with old data.
number_of_samples = requested_length + overlap_length_;
- new_period = 1;
+ new_period = true;
}
output->AssertSize(number_of_samples);
// Get the decoder from the database.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (!cng_decoder) {
LOG(LS_ERROR) << "Unknwown payload type";
return kUnknownPayloadType;
}
- CNG_dec_inst* cng_inst = cng_decoder->CngDecoderInstance();
- // The expression &(*output)[0][0] is a pointer to the first element in
- // the first channel.
- if (WebRtcCng_Generate(cng_inst, &(*output)[0][0], number_of_samples,
- new_period) < 0) {
+
+ std::unique_ptr<int16_t[]> temp(new int16_t[number_of_samples]);
+ if (!cng_decoder->Generate(
+ rtc::ArrayView<int16_t>(temp.get(), number_of_samples),
+ new_period)) {
// Error returned.
output->Zeros(requested_length);
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- LOG(LS_ERROR) << "WebRtcCng_Generate produced " << internal_error_code_;
+ LOG(LS_ERROR) <<
+ "ComfortNoiseDecoder::Genererate failed to generate comfort noise";
return kInternalError;
}
+ (*output)[0].OverwriteAt(temp.get(), number_of_samples, 0);
if (first_call_) {
// Set tapering window parameters. Values are in Q15.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
index 1fc22586637..f877bf63efb 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
@@ -38,8 +38,7 @@ class ComfortNoise {
first_call_(true),
overlap_length_(5 * fs_hz_ / 8000),
decoder_database_(decoder_database),
- sync_buffer_(sync_buffer),
- internal_error_code_(0) {
+ sync_buffer_(sync_buffer) {
}
// Resets the state. Should be called before each new comfort noise period.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc
new file mode 100644
index 00000000000..ad89ab8a139
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
+
+#include <cstdlib>
+#include <limits>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+// This function decides the overflow-protecting scaling and calls
+// WebRtcSpl_CrossCorrelation.
+int CrossCorrelationWithAutoShift(const int16_t* sequence_1,
+ const int16_t* sequence_2,
+ size_t sequence_1_length,
+ size_t cross_correlation_length,
+ int cross_correlation_step,
+ int32_t* cross_correlation) {
+ // Find the maximum absolute value of sequence_1 and 2.
+ const int16_t max_1 = WebRtcSpl_MaxAbsValueW16(sequence_1, sequence_1_length);
+ const int sequence_2_shift =
+ cross_correlation_step * (static_cast<int>(cross_correlation_length) - 1);
+ const int16_t* sequence_2_start =
+ sequence_2_shift >= 0 ? sequence_2 : sequence_2 + sequence_2_shift;
+ const size_t sequence_2_length =
+ sequence_1_length + std::abs(sequence_2_shift);
+ const int16_t max_2 =
+ WebRtcSpl_MaxAbsValueW16(sequence_2_start, sequence_2_length);
+
+ // In order to avoid overflow when computing the sum we should scale the
+ // samples so that (in_vector_length * max_1 * max_2) will not overflow.
+ // Expected scaling fulfills
+ // 1) sufficient:
+ // sequence_1_length * (max_1 * max_2 >> scaling) <= 0x7fffffff;
+ // 2) necessary:
+ // if (scaling > 0)
+ // sequence_1_length * (max_1 * max_2 >> (scaling - 1)) > 0x7fffffff;
+ // The following calculation fulfills 1) and almost fulfills 2).
+ // There are some corner cases that 2) is not satisfied, e.g.,
+ // max_1 = 17, max_2 = 30848, sequence_1_length = 4095, in such case,
+ // optimal scaling is 0, while the following calculation results in 1.
+ const int32_t factor = (max_1 * max_2) / (std::numeric_limits<int32_t>::max()
+ / static_cast<int32_t>(sequence_1_length));
+ const int scaling = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
+
+ WebRtcSpl_CrossCorrelation(cross_correlation, sequence_1, sequence_2,
+ sequence_1_length, cross_correlation_length,
+ scaling, cross_correlation_step);
+
+ return scaling;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h
new file mode 100644
index 00000000000..db14141027c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
+
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+// The function calculates the cross-correlation between two sequences
+// |sequence_1| and |sequence_2|. |sequence_1| is taken as reference, with
+// |sequence_1_length| as its length. |sequence_2| slides for the calculation of
+// cross-correlation. The result will be saved in |cross_correlation|.
+// |cross_correlation_length| correlation points are calculated.
+// The corresponding lag starts from 0, and increases with a step of
+// |cross_correlation_step|. The result is without normalization. To avoid
+// overflow, the result will be right shifted. The amount of shifts will be
+// returned.
+//
+// Input:
+// - sequence_1 : First sequence (reference).
+// - sequence_2 : Second sequence (sliding during calculation).
+// - sequence_1_length : Length of |sequence_1|.
+// - cross_correlation_length : Number of cross-correlations to calculate.
+// - cross_correlation_step : Step in the lag for the cross-correlation.
+//
+// Output:
+// - cross_correlation : The cross-correlation in Q(-right_shifts)
+//
+// Return:
+// Number of right shifts in cross_correlation.
+
+int CrossCorrelationWithAutoShift(const int16_t* sequence_1,
+ const int16_t* sequence_2,
+ size_t sequence_1_length,
+ size_t cross_correlation_length,
+ int cross_correlation_step,
+ int32_t* cross_correlation);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
index 39bb4662c71..545d1d62455 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -29,26 +29,19 @@ DecisionLogic* DecisionLogic::Create(int fs_hz,
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter) {
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer) {
switch (playout_mode) {
case kPlayoutOn:
case kPlayoutStreaming:
- return new DecisionLogicNormal(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
+ return new DecisionLogicNormal(
+ fs_hz, output_size_samples, playout_mode, decoder_database,
+ packet_buffer, delay_manager, buffer_level_filter, tick_timer);
case kPlayoutFax:
case kPlayoutOff:
- return new DecisionLogicFax(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
+ return new DecisionLogicFax(
+ fs_hz, output_size_samples, playout_mode, decoder_database,
+ packet_buffer, delay_manager, buffer_level_filter, tick_timer);
}
// This line cannot be reached, but must be here to avoid compiler errors.
assert(false);
@@ -61,30 +54,34 @@ DecisionLogic::DecisionLogic(int fs_hz,
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
: decoder_database_(decoder_database),
packet_buffer_(packet_buffer),
delay_manager_(delay_manager),
buffer_level_filter_(buffer_level_filter),
+ tick_timer_(tick_timer),
cng_state_(kCngOff),
- generated_noise_samples_(0),
packet_length_samples_(0),
sample_memory_(0),
prev_time_scale_(false),
- timescale_hold_off_(kMinTimescaleInterval),
+ timescale_countdown_(
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1)),
num_consecutive_expands_(0),
playout_mode_(playout_mode) {
delay_manager_->set_streaming_mode(playout_mode_ == kPlayoutStreaming);
SetSampleRate(fs_hz, output_size_samples);
}
+DecisionLogic::~DecisionLogic() = default;
+
void DecisionLogic::Reset() {
cng_state_ = kCngOff;
- generated_noise_samples_ = 0;
+ noise_fast_forward_ = 0;
packet_length_samples_ = 0;
sample_memory_ = 0;
prev_time_scale_ = false;
- timescale_hold_off_ = 0;
+ timescale_countdown_.reset();
num_consecutive_expands_ = 0;
}
@@ -92,7 +89,8 @@ void DecisionLogic::SoftReset() {
packet_length_samples_ = 0;
sample_memory_ = 0;
prev_time_scale_ = false;
- timescale_hold_off_ = kMinTimescaleInterval;
+ timescale_countdown_ =
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1);
}
void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) {
@@ -107,15 +105,15 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
size_t decoder_frame_length,
const RTPHeader* packet_header,
Modes prev_mode,
- bool play_dtmf, bool* reset_decoder) {
+ bool play_dtmf,
+ size_t generated_noise_samples,
+ bool* reset_decoder) {
if (prev_mode == kModeRfc3389Cng ||
prev_mode == kModeCodecInternalCng ||
prev_mode == kModeExpand) {
// If last mode was CNG (or Expand, since this could be covering up for
- // a lost CNG packet), increase the |generated_noise_samples_| counter.
- generated_noise_samples_ += output_size_samples_;
- // Remember that CNG is on. This is needed if comfort noise is interrupted
- // by DTMF.
+ // a lost CNG packet), remember that CNG is on. This is needed if comfort
+ // noise is interrupted by DTMF.
if (prev_mode == kModeRfc3389Cng) {
cng_state_ = kCngRfc3389On;
} else if (prev_mode == kModeCodecInternalCng) {
@@ -139,7 +137,7 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
return GetDecisionSpecialized(sync_buffer, expand, decoder_frame_length,
packet_header, prev_mode, play_dtmf,
- reset_decoder);
+ reset_decoder, generated_noise_samples);
}
void DecisionLogic::ExpandDecision(Operations operation) {
@@ -152,10 +150,6 @@ void DecisionLogic::ExpandDecision(Operations operation) {
void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples,
Modes prev_mode) {
- const int elapsed_time_ms =
- static_cast<int>(output_size_samples_ / (8 * fs_mult_));
- delay_manager_->UpdateCounters(elapsed_time_ms);
-
// Do not update buffer history if currently playing CNG since it will bias
// the filtered buffer level.
if ((prev_mode != kModeRfc3389Cng) && (prev_mode != kModeCodecInternalCng)) {
@@ -170,14 +164,13 @@ void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples,
int sample_memory_local = 0;
if (prev_time_scale_) {
sample_memory_local = sample_memory_;
- timescale_hold_off_ = kMinTimescaleInterval;
+ timescale_countdown_ =
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval);
}
buffer_level_filter_->Update(buffer_size_packets, sample_memory_local,
packet_length_samples_);
prev_time_scale_ = false;
}
-
- timescale_hold_off_ = std::max(timescale_hold_off_ - 1, 0);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
index 72121b7aac5..008655d1a2b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
@@ -14,6 +14,7 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -39,7 +40,8 @@ class DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer);
// Constructor.
DecisionLogic(int fs_hz,
@@ -48,10 +50,10 @@ class DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer);
- // Destructor.
- virtual ~DecisionLogic() {}
+ virtual ~DecisionLogic();
// Resets object to a clean state.
void Reset();
@@ -79,6 +81,7 @@ class DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
+ size_t generated_noise_samples,
bool* reset_decoder);
// These methods test the |cng_state_| for different conditions.
@@ -101,10 +104,7 @@ class DecisionLogic {
// Accessors and mutators.
void set_sample_memory(int32_t value) { sample_memory_ = value; }
- size_t generated_noise_samples() const { return generated_noise_samples_; }
- void set_generated_noise_samples(size_t value) {
- generated_noise_samples_ = value;
- }
+ size_t noise_fast_forward() const { return noise_fast_forward_; }
size_t packet_length_samples() const { return packet_length_samples_; }
void set_packet_length_samples(size_t value) {
packet_length_samples_ = value;
@@ -113,8 +113,8 @@ class DecisionLogic {
NetEqPlayoutMode playout_mode() const { return playout_mode_; }
protected:
- // The value 6 sets maximum time-stretch rate to about 100 ms/s.
- static const int kMinTimescaleInterval = 6;
+ // The value 5 sets maximum time-stretch rate to about 100 ms/s.
+ static const int kMinTimescaleInterval = 5;
enum CngState {
kCngOff,
@@ -138,7 +138,8 @@ class DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) = 0;
+ bool* reset_decoder,
+ size_t generated_noise_samples) = 0;
// Updates the |buffer_level_filter_| with the current buffer level
// |buffer_size_packets|.
@@ -148,15 +149,16 @@ class DecisionLogic {
const PacketBuffer& packet_buffer_;
DelayManager* delay_manager_;
BufferLevelFilter* buffer_level_filter_;
+ const TickTimer* tick_timer_;
int fs_mult_;
size_t output_size_samples_;
CngState cng_state_; // Remember if comfort noise is interrupted by other
// event (e.g., DTMF).
- size_t generated_noise_samples_;
+ size_t noise_fast_forward_ = 0;
size_t packet_length_samples_;
int sample_memory_;
bool prev_time_scale_;
- int timescale_hold_off_;
+ std::unique_ptr<TickTimer::Countdown> timescale_countdown_;
int num_consecutive_expands_;
const NetEqPlayoutMode playout_mode_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
index ddea64425f2..aace402a7de 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
@@ -26,7 +26,8 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) {
+ bool* reset_decoder,
+ size_t generated_noise_samples) {
assert(playout_mode_ == kPlayoutFax || playout_mode_ == kPlayoutOff);
uint32_t target_timestamp = sync_buffer.end_timestamp();
uint32_t available_timestamp = 0;
@@ -37,7 +38,7 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
decoder_database_->IsComfortNoise(packet_header->payloadType);
}
if (is_cng_packet) {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ if (static_cast<int32_t>((generated_noise_samples + target_timestamp)
- available_timestamp) >= 0) {
// Time to play this packet now.
return kRfc3389Cng;
@@ -70,13 +71,13 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
} else if (target_timestamp == available_timestamp) {
return kNormal;
} else {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ if (static_cast<int32_t>((generated_noise_samples + target_timestamp)
- available_timestamp) >= 0) {
return kNormal;
} else {
// If currently playing comfort noise, continue with that. Do not
- // increase the timestamp counter since generated_noise_samples_ will
- // be increased.
+ // increase the timestamp counter since generated_noise_stopwatch_ in
+ // NetEqImpl will take care of the time-keeping.
if (cng_state_ == kCngRfc3389On) {
return kRfc3389CngNoPacket;
} else if (cng_state_ == kCngInternalOn) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
index 204dcc168a3..6958f908b1c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
@@ -28,11 +28,16 @@ class DecisionLogicFax : public DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
+ : DecisionLogic(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter,
+ tick_timer) {}
protected:
// Returns the operation that should be done next. |sync_buffer| and |expand|
@@ -50,7 +55,8 @@ class DecisionLogicFax : public DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) override;
+ bool* reset_decoder,
+ size_t generated_noise_samples) override;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(DecisionLogicFax);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
index 0252d1cdfaf..37a75d7f5ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
@@ -31,7 +31,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) {
+ bool* reset_decoder,
+ size_t generated_noise_samples) {
assert(playout_mode_ == kPlayoutOn || playout_mode_ == kPlayoutStreaming);
// Guard for errors, to avoid getting stuck in error mode.
if (prev_mode == kModeError) {
@@ -52,7 +53,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
}
if (is_cng_packet) {
- return CngOperation(prev_mode, target_timestamp, available_timestamp);
+ return CngOperation(prev_mode, target_timestamp, available_timestamp,
+ generated_noise_samples);
}
// Handle the case with no packet at all available (except maybe DTMF).
@@ -76,7 +78,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
available_timestamp, target_timestamp, five_seconds_samples)) {
return FuturePacketAvailable(sync_buffer, expand, decoder_frame_length,
prev_mode, target_timestamp,
- available_timestamp, play_dtmf);
+ available_timestamp, play_dtmf,
+ generated_noise_samples);
} else {
// This implies that available_timestamp < target_timestamp, which can
// happen when a new stream or codec is received. Signal for a reset.
@@ -86,10 +89,11 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
uint32_t target_timestamp,
- uint32_t available_timestamp) {
+ uint32_t available_timestamp,
+ size_t generated_noise_samples) {
// Signed difference between target and available timestamp.
int32_t timestamp_diff = static_cast<int32_t>(
- static_cast<uint32_t>(generated_noise_samples_ + target_timestamp) -
+ static_cast<uint32_t>(generated_noise_samples + target_timestamp) -
available_timestamp);
int32_t optimal_level_samp = static_cast<int32_t>(
(delay_manager_->TargetLevel() * packet_length_samples_) >> 8);
@@ -97,9 +101,9 @@ Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
if (excess_waiting_time_samp > optimal_level_samp / 2) {
// The waiting time for this packet will be longer than 1.5
- // times the wanted buffer delay. Advance the clock to cut
+ // times the wanted buffer delay. Apply fast-forward to cut the
// waiting time down to the optimal.
- generated_noise_samples_ += excess_waiting_time_samp;
+ noise_fast_forward_ += excess_waiting_time_samp;
timestamp_diff += excess_waiting_time_samp;
}
@@ -109,6 +113,7 @@ Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
return kRfc3389CngNoPacket;
} else {
// Otherwise, go for the CNG packet now.
+ noise_fast_forward_ = 0;
return kRfc3389Cng;
}
}
@@ -153,7 +158,8 @@ Operations DecisionLogicNormal::FuturePacketAvailable(
Modes prev_mode,
uint32_t target_timestamp,
uint32_t available_timestamp,
- bool play_dtmf) {
+ bool play_dtmf,
+ size_t generated_noise_samples) {
// Required packet is not available, but a future packet is.
// Check if we should continue with an ongoing expand because the new packet
// is too far into the future.
@@ -184,7 +190,7 @@ Operations DecisionLogicNormal::FuturePacketAvailable(
// safety precaution), but make sure that the number of samples in buffer
// is no higher than 4 times the optimal level. (Note that TargetLevel()
// is in Q8.)
- if (static_cast<uint32_t>(generated_noise_samples_ + target_timestamp) >=
+ if (static_cast<uint32_t>(generated_noise_samples + target_timestamp) >=
available_timestamp ||
cur_size_samples >
((delay_manager_->TargetLevel() * packet_length_samples_) >> 8) *
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
index 7465906a381..aa0edf3152a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
@@ -28,11 +28,16 @@ class DecisionLogicNormal : public DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
+ : DecisionLogic(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter,
+ tick_timer) {}
protected:
static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
@@ -54,7 +59,8 @@ class DecisionLogicNormal : public DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) override;
+ bool* reset_decoder,
+ size_t generated_noise_samples) override;
// Returns the operation to do given that the expected packet is not
// available, but a packet further into the future is at hand.
@@ -65,7 +71,8 @@ class DecisionLogicNormal : public DecisionLogic {
Modes prev_mode,
uint32_t target_timestamp,
uint32_t available_timestamp,
- bool play_dtmf);
+ bool play_dtmf,
+ size_t generated_noise_samples);
// Returns the operation to do given that the expected packet is available.
virtual Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
@@ -77,12 +84,16 @@ class DecisionLogicNormal : public DecisionLogic {
private:
// Returns the operation given that the next available packet is a comfort
// noise payload (RFC 3389 only, not codec-internal).
- Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
- uint32_t available_timestamp);
+ Operations CngOperation(Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp,
+ size_t generated_noise_samples);
// Checks if enough time has elapsed since the last successful timescale
// operation was done (i.e., accelerate or preemptive expand).
- bool TimescaleAllowed() const { return timescale_hold_off_ == 0; }
+ bool TimescaleAllowed() const {
+ return !timescale_countdown_ || timescale_countdown_->Finished();
+ }
// Checks if the current (filtered) buffer level is under the target level.
bool UnderTargetLevel() const;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
index 499f9464347..ebb366890b3 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
@@ -11,45 +11,42 @@
// Unit tests for DecisionLogic class and derived classes.
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
TEST(DecisionLogic, CreateAndDestroy) {
int fs_hz = 8000;
int output_size_samples = fs_hz / 100; // Samples per 10 ms.
- DecoderDatabase decoder_database;
- PacketBuffer packet_buffer(10);
- DelayPeakDetector delay_peak_detector;
- DelayManager delay_manager(240, &delay_peak_detector);
+ DecoderDatabase decoder_database(
+ std::unique_ptr<MockAudioDecoderFactory>(new MockAudioDecoderFactory));
+ TickTimer tick_timer;
+ PacketBuffer packet_buffer(10, &tick_timer);
+ DelayPeakDetector delay_peak_detector(&tick_timer);
+ DelayManager delay_manager(240, &delay_peak_detector, &tick_timer);
BufferLevelFilter buffer_level_filter;
- DecisionLogic* logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOn, &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ DecisionLogic* logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutOn, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutStreaming,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutStreaming, &decoder_database,
+ packet_buffer, &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutFax,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutFax, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOff,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutOff, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
index 92d4bab1e4a..4fddf75ce26 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
@@ -19,13 +19,39 @@
namespace webrtc {
-DecoderDatabase::DecoderDatabase()
- : active_decoder_(-1), active_cng_decoder_(-1) {}
+DecoderDatabase::DecoderDatabase(
+ std::unique_ptr<AudioDecoderFactory> decoder_factory)
+ : active_decoder_type_(-1),
+ active_cng_decoder_type_(-1),
+ decoder_factory_(std::move(decoder_factory)) {}
-DecoderDatabase::~DecoderDatabase() {}
+DecoderDatabase::~DecoderDatabase() = default;
-DecoderDatabase::DecoderInfo::~DecoderInfo() {
- if (!external) delete decoder;
+DecoderDatabase::DecoderInfo::DecoderInfo(NetEqDecoder ct,
+ const std::string& nm,
+ int fs,
+ AudioDecoder* ext_dec)
+ : codec_type(ct),
+ name(nm),
+ fs_hz(fs),
+ external_decoder(ext_dec),
+ audio_format_(acm2::RentACodec::NetEqDecoderToSdpAudioFormat(ct)) {}
+
+DecoderDatabase::DecoderInfo::DecoderInfo(DecoderInfo&&) = default;
+DecoderDatabase::DecoderInfo::~DecoderInfo() = default;
+
+AudioDecoder* DecoderDatabase::DecoderInfo::GetDecoder(
+ AudioDecoderFactory* factory) {
+ if (external_decoder) {
+ RTC_DCHECK(!decoder_);
+ return external_decoder;
+ }
+ RTC_DCHECK(audio_format_);
+ if (!decoder_) {
+ decoder_ = factory->MakeAudioDecoder(*audio_format_);
+ }
+ RTC_DCHECK(decoder_) << "Failed to create: " << *audio_format_;
+ return decoder_.get();
}
bool DecoderDatabase::Empty() const { return decoders_.empty(); }
@@ -34,8 +60,8 @@ int DecoderDatabase::Size() const { return static_cast<int>(decoders_.size()); }
void DecoderDatabase::Reset() {
decoders_.clear();
- active_decoder_ = -1;
- active_cng_decoder_ = -1;
+ active_decoder_type_ = -1;
+ active_cng_decoder_type_ = -1;
}
int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
@@ -48,8 +74,9 @@ int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
return kCodecNotSupported;
}
const int fs_hz = CodecSampleRateHz(codec_type);
- DecoderInfo info(codec_type, name, fs_hz, NULL, false);
- auto ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ DecoderInfo info(codec_type, name, fs_hz, nullptr);
+ auto ret =
+ decoders_.insert(std::make_pair(rtp_payload_type, std::move(info)));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
return kDecoderExists;
@@ -75,8 +102,8 @@ int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
return kInvalidPointer;
}
std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, codec_name, fs_hz, decoder, true);
- ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ DecoderInfo info(codec_type, codec_name, fs_hz, decoder);
+ ret = decoders_.insert(std::make_pair(rtp_payload_type, std::move(info)));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
return kDecoderExists;
@@ -89,11 +116,11 @@ int DecoderDatabase::Remove(uint8_t rtp_payload_type) {
// No decoder with that |rtp_payload_type|.
return kDecoderNotFound;
}
- if (active_decoder_ == rtp_payload_type) {
- active_decoder_ = -1; // No active decoder.
+ if (active_decoder_type_ == rtp_payload_type) {
+ active_decoder_type_ = -1; // No active decoder.
}
- if (active_cng_decoder_ == rtp_payload_type) {
- active_cng_decoder_ = -1; // No active CNG decoder.
+ if (active_cng_decoder_type_ == rtp_payload_type) {
+ active_cng_decoder_type_ = -1; // No active CNG decoder.
}
return kOK;
}
@@ -122,7 +149,8 @@ uint8_t DecoderDatabase::GetRtpPayloadType(
}
AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
- if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type)) {
+ if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type) ||
+ IsComfortNoise(rtp_payload_type)) {
// These are not real decoders.
return NULL;
}
@@ -132,13 +160,7 @@ AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
return NULL;
}
DecoderInfo* info = &(*it).second;
- if (!info->decoder) {
- // Create the decoder object.
- AudioDecoder* decoder = CreateAudioDecoder(info->codec_type);
- assert(decoder); // Should not be able to have an unsupported codec here.
- info->decoder = decoder;
- }
- return info->decoder;
+ return info->GetDecoder(decoder_factory_.get());
}
bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
@@ -152,14 +174,16 @@ bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
}
bool DecoderDatabase::IsComfortNoise(uint8_t rtp_payload_type) const {
- if (IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGnb) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGwb) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGswb32kHz) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGswb48kHz)) {
- return true;
- } else {
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
return false;
}
+ const auto& type = it->second.codec_type;
+ return type == NetEqDecoder::kDecoderCNGnb
+ || type == NetEqDecoder::kDecoderCNGwb
+ || type == NetEqDecoder::kDecoderCNGswb32kHz
+ || type == NetEqDecoder::kDecoderCNGswb48kHz;
}
bool DecoderDatabase::IsDtmf(uint8_t rtp_payload_type) const {
@@ -178,37 +202,33 @@ int DecoderDatabase::SetActiveDecoder(uint8_t rtp_payload_type,
// Decoder not found.
return kDecoderNotFound;
}
+ RTC_CHECK(!IsComfortNoise(rtp_payload_type));
assert(new_decoder);
*new_decoder = false;
- if (active_decoder_ < 0) {
+ if (active_decoder_type_ < 0) {
// This is the first active decoder.
*new_decoder = true;
- } else if (active_decoder_ != rtp_payload_type) {
+ } else if (active_decoder_type_ != rtp_payload_type) {
// Moving from one active decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_decoder_);
+ DecoderMap::iterator it = decoders_.find(active_decoder_type_);
if (it == decoders_.end()) {
// Decoder not found. This should not be possible.
assert(false);
return kDecoderNotFound;
}
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
+ it->second.DropDecoder();
*new_decoder = true;
}
- active_decoder_ = rtp_payload_type;
+ active_decoder_type_ = rtp_payload_type;
return kOK;
}
AudioDecoder* DecoderDatabase::GetActiveDecoder() {
- if (active_decoder_ < 0) {
+ if (active_decoder_type_ < 0) {
// No active decoder.
return NULL;
}
- return GetDecoder(active_decoder_);
+ return GetDecoder(active_decoder_type_);
}
int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
@@ -218,31 +238,32 @@ int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
// Decoder not found.
return kDecoderNotFound;
}
- if (active_cng_decoder_ >= 0 && active_cng_decoder_ != rtp_payload_type) {
+ if (active_cng_decoder_type_ >= 0 &&
+ active_cng_decoder_type_ != rtp_payload_type) {
// Moving from one active CNG decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_cng_decoder_);
+ DecoderMap::iterator it = decoders_.find(active_cng_decoder_type_);
if (it == decoders_.end()) {
// Decoder not found. This should not be possible.
assert(false);
return kDecoderNotFound;
}
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
+ // The CNG decoder should never be provided externally.
+ RTC_CHECK(!it->second.external_decoder);
+ active_cng_decoder_.reset();
}
- active_cng_decoder_ = rtp_payload_type;
+ active_cng_decoder_type_ = rtp_payload_type;
return kOK;
}
-AudioDecoder* DecoderDatabase::GetActiveCngDecoder() {
- if (active_cng_decoder_ < 0) {
+ComfortNoiseDecoder* DecoderDatabase::GetActiveCngDecoder() {
+ if (active_cng_decoder_type_ < 0) {
// No active CNG decoder.
return NULL;
}
- return GetDecoder(active_cng_decoder_);
+ if (!active_cng_decoder_) {
+ active_cng_decoder_.reset(new ComfortNoiseDecoder);
+ }
+ return active_cng_decoder_.get();
}
int DecoderDatabase::CheckPayloadTypes(const PacketList& packet_list) const {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
index 01ff0c9fdb3..3a40e08c8a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
@@ -12,10 +12,14 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
#include <map>
+#include <memory>
#include <string>
#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h" // NULL
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/typedefs.h"
@@ -34,37 +38,38 @@ class DecoderDatabase {
kInvalidPointer = -6
};
- // Struct used to store decoder info in the database.
- struct DecoderInfo {
- DecoderInfo() = default;
- DecoderInfo(NetEqDecoder ct, int fs, AudioDecoder* dec, bool ext)
- : DecoderInfo(ct, "", fs, dec, ext) {}
+ // Class that stores decoder info in the database.
+ class DecoderInfo {
+ public:
DecoderInfo(NetEqDecoder ct,
const std::string& nm,
int fs,
- AudioDecoder* dec,
- bool ext)
- : codec_type(ct),
- name(nm),
- fs_hz(fs),
- rtp_sample_rate_hz(fs),
- decoder(dec),
- external(ext) {}
+ AudioDecoder* ext_dec);
+ DecoderInfo(DecoderInfo&&);
~DecoderInfo();
- NetEqDecoder codec_type = NetEqDecoder::kDecoderArbitrary;
- std::string name;
- int fs_hz = 8000;
- int rtp_sample_rate_hz = 8000;
- AudioDecoder* decoder = nullptr;
- bool external = false;
+ // Get the AudioDecoder object, creating it first if necessary.
+ AudioDecoder* GetDecoder(AudioDecoderFactory* factory);
+
+ // Delete the AudioDecoder object, unless it's external. (This means we can
+ // always recreate it later if we need it.)
+ void DropDecoder() { decoder_.reset(); }
+
+ const NetEqDecoder codec_type;
+ const std::string name;
+ const int fs_hz;
+ AudioDecoder* const external_decoder;
+
+ private:
+ const rtc::Optional<SdpAudioFormat> audio_format_;
+ std::unique_ptr<AudioDecoder> decoder_;
};
// Maximum value for 8 bits, and an invalid RTP payload type (since it is
// only 7 bits).
static const uint8_t kRtpPayloadTypeError = 0xFF;
- DecoderDatabase();
+ DecoderDatabase(std::unique_ptr<AudioDecoderFactory> decoder_factory);
virtual ~DecoderDatabase();
@@ -142,7 +147,7 @@ class DecoderDatabase {
// Returns the current active comfort noise decoder, or NULL if no active
// comfort noise decoder exists.
- virtual AudioDecoder* GetActiveCngDecoder();
+ virtual ComfortNoiseDecoder* GetActiveCngDecoder();
// Returns kOK if all packets in |packet_list| carry payload types that are
// registered in the database. Otherwise, returns kDecoderNotFound.
@@ -152,8 +157,10 @@ class DecoderDatabase {
typedef std::map<uint8_t, DecoderInfo> DecoderMap;
DecoderMap decoders_;
- int active_decoder_;
- int active_cng_decoder_;
+ int active_decoder_type_;
+ int active_cng_decoder_type_;
+ std::unique_ptr<ComfortNoiseDecoder> active_cng_decoder_;
+ const std::unique_ptr<AudioDecoderFactory> decoder_factory_;
RTC_DISALLOW_COPY_AND_ASSIGN(DecoderDatabase);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
index 85aaef11431..91ca606d65b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -19,17 +19,21 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
namespace webrtc {
TEST(DecoderDatabase, CreateAndDestroy) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
EXPECT_EQ(0, db.Size());
EXPECT_TRUE(db.Empty());
}
TEST(DecoderDatabase, InsertAndRemove) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -43,7 +47,8 @@ TEST(DecoderDatabase, InsertAndRemove) {
}
TEST(DecoderDatabase, GetDecoderInfo) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -53,16 +58,16 @@ TEST(DecoderDatabase, GetDecoderInfo) {
info = db.GetDecoderInfo(kPayloadType);
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
- EXPECT_EQ(NULL, info->decoder);
+ EXPECT_EQ(nullptr, info->external_decoder);
EXPECT_EQ(8000, info->fs_hz);
EXPECT_EQ(kCodecName, info->name);
- EXPECT_FALSE(info->external);
info = db.GetDecoderInfo(kPayloadType + 1); // Other payload type.
EXPECT_TRUE(info == NULL); // Should not be found.
}
TEST(DecoderDatabase, GetRtpPayloadType) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -76,7 +81,7 @@ TEST(DecoderDatabase, GetRtpPayloadType) {
}
TEST(DecoderDatabase, GetDecoder) {
- DecoderDatabase db;
+ DecoderDatabase db(CreateBuiltinAudioDecoderFactory());
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(DecoderDatabase::kOK,
@@ -87,7 +92,8 @@ TEST(DecoderDatabase, GetDecoder) {
}
TEST(DecoderDatabase, TypeTests) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadTypePcmU = 0;
const uint8_t kPayloadTypeCng = 13;
const uint8_t kPayloadTypeDtmf = 100;
@@ -122,7 +128,8 @@ TEST(DecoderDatabase, TypeTests) {
}
TEST(DecoderDatabase, ExternalDecoder) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
MockAudioDecoder decoder;
@@ -139,9 +146,8 @@ TEST(DecoderDatabase, ExternalDecoder) {
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
EXPECT_EQ(kCodecName, info->name);
- EXPECT_EQ(&decoder, info->decoder);
+ EXPECT_EQ(&decoder, info->external_decoder);
EXPECT_EQ(8000, info->fs_hz);
- EXPECT_TRUE(info->external);
// Expect not to delete the decoder when removing it from the database, since
// it was declared externally.
EXPECT_CALL(decoder, Die()).Times(0);
@@ -152,7 +158,8 @@ TEST(DecoderDatabase, ExternalDecoder) {
}
TEST(DecoderDatabase, CheckPayloadTypes) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
// Load a number of payloads into the database. Payload types are 0, 1, ...,
// while the decoder type is the same for all payload types (this does not
// matter for the test).
@@ -196,7 +203,7 @@ TEST(DecoderDatabase, CheckPayloadTypes) {
// Test the methods for setting and getting active speech and CNG decoders.
TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) {
- DecoderDatabase db;
+ DecoderDatabase db(CreateBuiltinAudioDecoderFactory());
// Load payload types.
ASSERT_EQ(DecoderDatabase::kOK,
db.RegisterPayload(0, NetEqDecoder::kDecoderPCMu, "pcmu"));
@@ -233,8 +240,8 @@ TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) {
// Set active CNG codec.
EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveCngDecoder(13));
- decoder = db.GetActiveCngDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ ComfortNoiseDecoder* cng = db.GetActiveCngDecoder();
+ ASSERT_FALSE(cng == NULL); // Should get a decoder here.
// Remove the active CNG decoder, and verify that the active becomes NULL.
EXPECT_EQ(DecoderDatabase::kOK, db.Remove(13));
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
index af49f00f8af..84bda7cf699 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
@@ -24,12 +24,13 @@
namespace webrtc {
DelayManager::DelayManager(size_t max_packets_in_buffer,
- DelayPeakDetector* peak_detector)
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer)
: first_packet_received_(false),
max_packets_in_buffer_(max_packets_in_buffer),
iat_vector_(kMaxIat + 1, 0),
iat_factor_(0),
- packet_iat_count_ms_(0),
+ tick_timer_(tick_timer),
base_target_level_(4), // In Q0 domain.
target_level_(base_target_level_ << 8), // In Q8 domain.
packet_len_ms_(0),
@@ -41,7 +42,6 @@ DelayManager::DelayManager(size_t max_packets_in_buffer,
maximum_delay_ms_(target_level_),
iat_cumulative_sum_(0),
max_iat_cumulative_sum_(0),
- max_timer_ms_(0),
peak_detector_(*peak_detector),
last_pack_cng_or_dtmf_(1) {
assert(peak_detector); // Should never be NULL.
@@ -79,7 +79,7 @@ int DelayManager::Update(uint16_t sequence_number,
if (!first_packet_received_) {
// Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_seq_no_ = sequence_number;
last_timestamp_ = timestamp;
first_packet_received_ = true;
@@ -106,7 +106,7 @@ int DelayManager::Update(uint16_t sequence_number,
// Calculate inter-arrival time (IAT) in integer "packet times"
// (rounding down). This is the value used as index to the histogram
// vector |iat_vector_|.
- int iat_packets = packet_iat_count_ms_ / packet_len_ms;
+ int iat_packets = packet_iat_stopwatch_->ElapsedMs() / packet_len_ms;
if (streaming_mode_) {
UpdateCumulativeSums(packet_len_ms, sequence_number);
@@ -137,7 +137,7 @@ int DelayManager::Update(uint16_t sequence_number,
} // End if (packet_len_ms > 0).
// Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_seq_no_ = sequence_number;
last_timestamp_ = timestamp;
return 0;
@@ -147,7 +147,8 @@ void DelayManager::UpdateCumulativeSums(int packet_len_ms,
uint16_t sequence_number) {
// Calculate IAT in Q8, including fractions of a packet (i.e., more
// accurate than |iat_packets|.
- int iat_packets_q8 = (packet_iat_count_ms_ << 8) / packet_len_ms;
+ int iat_packets_q8 =
+ (packet_iat_stopwatch_->ElapsedMs() << 8) / packet_len_ms;
// Calculate cumulative sum IAT with sequence number compensation. The sum
// is zero if there is no clock-drift.
iat_cumulative_sum_ += (iat_packets_q8 -
@@ -159,9 +160,9 @@ void DelayManager::UpdateCumulativeSums(int packet_len_ms,
if (iat_cumulative_sum_ > max_iat_cumulative_sum_) {
// Found a new maximum.
max_iat_cumulative_sum_ = iat_cumulative_sum_;
- max_timer_ms_ = 0;
+ max_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
}
- if (max_timer_ms_ > kMaxStreamingPeakPeriodMs) {
+ if (max_iat_stopwatch_->ElapsedMs() > kMaxStreamingPeakPeriodMs) {
// Too long since the last maximum was observed; decrease max value.
max_iat_cumulative_sum_ -= kCumulativeSumDrift;
}
@@ -299,7 +300,7 @@ int DelayManager::SetPacketAudioLength(int length_ms) {
}
packet_len_ms_ = length_ms;
peak_detector_.SetPacketAudioLength(packet_len_ms_);
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_pack_cng_or_dtmf_ = 1; // TODO(hlundin): Legacy. Remove?
return 0;
}
@@ -311,8 +312,8 @@ void DelayManager::Reset() {
peak_detector_.Reset();
ResetHistogram(); // Resets target levels too.
iat_factor_ = 0; // Adapt the histogram faster for the first few packets.
- packet_iat_count_ms_ = 0;
- max_timer_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
+ max_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
iat_cumulative_sum_ = 0;
max_iat_cumulative_sum_ = 0;
last_pack_cng_or_dtmf_ = 1;
@@ -340,14 +341,10 @@ bool DelayManager::PeakFound() const {
return peak_detector_.peak_found();
}
-void DelayManager::UpdateCounters(int elapsed_time_ms) {
- packet_iat_count_ms_ += elapsed_time_ms;
- peak_detector_.IncrementCounter(elapsed_time_ms);
- max_timer_ms_ += elapsed_time_ms;
+void DelayManager::ResetPacketIatCount() {
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
}
-void DelayManager::ResetPacketIatCount() { packet_iat_count_ms_ = 0; }
-
// Note that |low_limit| and |higher_limit| are not assigned to
// |minimum_delay_ms_| and |maximum_delay_ms_| defined by the client of this
// class. They are computed from |target_level_| and used for decision making.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
index 785fced15df..6f3c14aea9d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
@@ -13,10 +13,12 @@
#include <string.h> // Provide access to size_t.
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -32,7 +34,9 @@ class DelayManager {
// buffer can hold no more than |max_packets_in_buffer| packets (i.e., this
// is the number of packet slots in the buffer). Supply a PeakDetector
// object to the DelayManager.
- DelayManager(size_t max_packets_in_buffer, DelayPeakDetector* peak_detector);
+ DelayManager(size_t max_packets_in_buffer,
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer);
virtual ~DelayManager();
@@ -75,10 +79,6 @@ class DelayManager {
// DelayPeakDetector object.
virtual bool PeakFound() const;
- // Notifies the counters in DelayManager and DelayPeakDetector that
- // |elapsed_time_ms| have elapsed.
- virtual void UpdateCounters(int elapsed_time_ms);
-
// Reset the inter-arrival time counter to 0.
virtual void ResetPacketIatCount();
@@ -135,7 +135,9 @@ class DelayManager {
const size_t max_packets_in_buffer_; // Capacity of the packet buffer.
IATVector iat_vector_; // Histogram of inter-arrival times.
int iat_factor_; // Forgetting factor for updating the IAT histogram (Q15).
- int packet_iat_count_ms_; // Milliseconds elapsed since last packet.
+ const TickTimer* tick_timer_;
+ // Time elapsed since last packet.
+ std::unique_ptr<TickTimer::Stopwatch> packet_iat_stopwatch_;
int base_target_level_; // Currently preferred buffer level before peak
// detection and streaming mode (Q0).
// TODO(turajs) change the comment according to the implementation of
@@ -153,7 +155,8 @@ class DelayManager {
int maximum_delay_ms_; // Externally set maximum allowed delay.
int iat_cumulative_sum_; // Cumulative sum of delta inter-arrival times.
int max_iat_cumulative_sum_; // Max of |iat_cumulative_sum_|.
- int max_timer_ms_; // Time elapsed since maximum was observed.
+ // Time elapsed since maximum was observed.
+ std::unique_ptr<TickTimer::Stopwatch> max_iat_stopwatch_;
DelayPeakDetector& peak_detector_;
int last_pack_cng_or_dtmf_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
index f231c3da301..3290e9cca68 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
@@ -39,21 +39,19 @@ class DelayManagerTest : public ::testing::Test {
void IncreaseTime(int inc_ms);
DelayManager* dm_;
+ TickTimer tick_timer_;
MockDelayPeakDetector detector_;
uint16_t seq_no_;
uint32_t ts_;
};
DelayManagerTest::DelayManagerTest()
- : dm_(NULL),
- seq_no_(0x1234),
- ts_(0x12345678) {
-}
+ : dm_(NULL), detector_(&tick_timer_), seq_no_(0x1234), ts_(0x12345678) {}
void DelayManagerTest::SetUp() {
EXPECT_CALL(detector_, Reset())
.Times(1);
- dm_ = new DelayManager(kMaxNumberOfPackets, &detector_);
+ dm_ = new DelayManager(kMaxNumberOfPackets, &detector_, &tick_timer_);
}
void DelayManagerTest::SetPacketAudioLength(int lengt_ms) {
@@ -69,9 +67,7 @@ void DelayManagerTest::InsertNextPacket() {
void DelayManagerTest::IncreaseTime(int inc_ms) {
for (int t = 0; t < inc_ms; t += kTimeStepMs) {
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
+ tick_timer_.Increment();
}
}
void DelayManagerTest::TearDown() {
@@ -115,13 +111,6 @@ TEST_F(DelayManagerTest, PeakFound) {
EXPECT_FALSE(dm_->PeakFound());
}
-TEST_F(DelayManagerTest, UpdateCounters) {
- // Expect DelayManager to pass on the counter update to the detector.
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
-}
-
TEST_F(DelayManagerTest, UpdateNormal) {
SetPacketAudioLength(kFrameSizeMs);
// First packet arrival.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
index 712c7788aca..ce9133bdaed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
@@ -12,6 +12,9 @@
#include <algorithm> // max
+#include "webrtc/base/checks.h"
+#include "webrtc/base/safe_conversions.h"
+
namespace webrtc {
// The DelayPeakDetector keeps track of severe inter-arrival times, called
@@ -23,14 +26,15 @@ namespace webrtc {
DelayPeakDetector::~DelayPeakDetector() = default;
-DelayPeakDetector::DelayPeakDetector()
- : peak_found_(false),
- peak_detection_threshold_(0),
- peak_period_counter_ms_(-1) {
+DelayPeakDetector::DelayPeakDetector(const TickTimer* tick_timer)
+ : peak_found_(false),
+ peak_detection_threshold_(0),
+ tick_timer_(tick_timer) {
+ RTC_DCHECK(!peak_period_stopwatch_);
}
void DelayPeakDetector::Reset() {
- peak_period_counter_ms_ = -1; // Indicate that next peak is the first.
+ peak_period_stopwatch_.reset();
peak_found_ = false;
peak_history_.clear();
}
@@ -55,38 +59,40 @@ int DelayPeakDetector::MaxPeakHeight() const {
return max_height;
}
-int DelayPeakDetector::MaxPeakPeriod() const {
- int max_period = -1; // Returns -1 for an empty history.
- std::list<Peak>::const_iterator it;
- for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
- max_period = std::max(max_period, it->period_ms);
+uint64_t DelayPeakDetector::MaxPeakPeriod() const {
+ auto max_period_element = std::max_element(
+ peak_history_.begin(), peak_history_.end(),
+ [](Peak a, Peak b) { return a.period_ms < b.period_ms; });
+ if (max_period_element == peak_history_.end()) {
+ return 0; // |peak_history_| is empty.
}
- return max_period;
+ RTC_DCHECK_GT(max_period_element->period_ms, 0u);
+ return max_period_element->period_ms;
}
bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
if (inter_arrival_time > target_level + peak_detection_threshold_ ||
inter_arrival_time > 2 * target_level) {
// A delay peak is observed.
- if (peak_period_counter_ms_ == -1) {
+ if (!peak_period_stopwatch_) {
// This is the first peak. Reset the period counter.
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= kMaxPeakPeriodMs) {
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
+ } else if (peak_period_stopwatch_->ElapsedMs() <= kMaxPeakPeriodMs) {
// This is not the first peak, and the period is valid.
// Store peak data in the vector.
Peak peak_data;
- peak_data.period_ms = peak_period_counter_ms_;
+ peak_data.period_ms = peak_period_stopwatch_->ElapsedMs();
peak_data.peak_height_packets = inter_arrival_time;
peak_history_.push_back(peak_data);
while (peak_history_.size() > kMaxNumPeaks) {
// Delete the oldest data point.
peak_history_.pop_front();
}
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= 2 * kMaxPeakPeriodMs) {
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
+ } else if (peak_period_stopwatch_->ElapsedMs() <= 2 * kMaxPeakPeriodMs) {
// Invalid peak due to too long period. Reset period counter and start
// looking for next peak.
- peak_period_counter_ms_ = 0;
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
} else {
// More than 2 times the maximum period has elapsed since the last peak
// was registered. It seams that the network conditions have changed.
@@ -97,16 +103,10 @@ bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
return CheckPeakConditions();
}
-void DelayPeakDetector::IncrementCounter(int inc_ms) {
- if (peak_period_counter_ms_ >= 0) {
- peak_period_counter_ms_ += inc_ms;
- }
-}
-
bool DelayPeakDetector::CheckPeakConditions() {
size_t s = peak_history_.size();
if (s >= kMinPeaksToTrigger &&
- peak_period_counter_ms_ <= 2 * MaxPeakPeriod()) {
+ peak_period_stopwatch_->ElapsedMs() <= 2 * MaxPeakPeriod()) {
peak_found_ = true;
} else {
peak_found_ = false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
index 69433b45248..f57d3bd71e5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
@@ -14,14 +14,16 @@
#include <string.h> // size_t
#include <list>
+#include <memory>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
class DelayPeakDetector {
public:
- DelayPeakDetector();
+ DelayPeakDetector(const TickTimer* tick_timer);
virtual ~DelayPeakDetector();
virtual void Reset();
@@ -37,20 +39,15 @@ class DelayPeakDetector {
// delay peaks have been observed recently. The unit is number of packets.
virtual int MaxPeakHeight() const;
- // Calculates and returns the maximum delay peak distance in ms.
- // Returns -1 if no delay peaks have been observed recently.
- virtual int MaxPeakPeriod() const;
+ // Calculates and returns the maximum delay peak distance in ms (strictly
+ // larger than 0), or 0 if no delay peaks have been observed recently.
+ virtual uint64_t MaxPeakPeriod() const;
// Updates the DelayPeakDetector with a new inter-arrival time (in packets)
// and the current target buffer level (needed to decide if a peak is observed
// or not). Returns true if peak-mode is active, false if not.
virtual bool Update(int inter_arrival_time, int target_level);
- // Increments the |peak_period_counter_ms_| with |inc_ms|. Only increments
- // the counter if it is non-negative. A negative denotes that no peak has
- // been observed.
- virtual void IncrementCounter(int inc_ms);
-
private:
static const size_t kMaxNumPeaks = 8;
static const size_t kMinPeaksToTrigger = 2;
@@ -58,7 +55,7 @@ class DelayPeakDetector {
static const int kMaxPeakPeriodMs = 10000;
typedef struct {
- int period_ms;
+ uint64_t period_ms;
int peak_height_packets;
} Peak;
@@ -67,7 +64,8 @@ class DelayPeakDetector {
std::list<Peak> peak_history_;
bool peak_found_;
int peak_detection_threshold_;
- int peak_period_counter_ms_;
+ const TickTimer* tick_timer_;
+ std::unique_ptr<TickTimer::Stopwatch> peak_period_stopwatch_;
RTC_DISALLOW_COPY_AND_ASSIGN(DelayPeakDetector);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
index c40f3991b04..32b36b25ef4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
@@ -17,22 +17,25 @@
namespace webrtc {
TEST(DelayPeakDetector, CreateAndDestroy) {
- DelayPeakDetector* detector = new DelayPeakDetector();
+ TickTimer tick_timer;
+ DelayPeakDetector* detector = new DelayPeakDetector(&tick_timer);
EXPECT_FALSE(detector->peak_found());
delete detector;
}
TEST(DelayPeakDetector, EmptyHistory) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
EXPECT_EQ(-1, detector.MaxPeakHeight());
- EXPECT_EQ(-1, detector.MaxPeakPeriod());
+ EXPECT_EQ(0u, detector.MaxPeakPeriod());
}
// Inject a series of packet arrivals into the detector. Three of the packets
// have suffered delays. After the third delay peak, peak-mode is expected to
// start. This should then continue until it is disengaged due to lack of peaks.
TEST(DelayPeakDetector, TriggerPeakMode) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
const int kPacketSizeMs = 30;
detector.SetPacketAudioLength(kPacketSizeMs);
@@ -52,7 +55,7 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
// Third delay peak. Trigger peak-mode after this packet.
arrival_times_ms[400] += kPeakDelayMs;
// The second peak period is the longest, 200 packets.
- const int kWorstPeakPeriod = 200 * kPacketSizeMs;
+ const uint64_t kWorstPeakPeriod = 200 * kPacketSizeMs;
int peak_mode_start_ms = arrival_times_ms[400];
// Expect to disengage after no peaks are observed for two period times.
int peak_mode_end_ms = peak_mode_start_ms + 2 * kWorstPeakPeriod;
@@ -74,7 +77,7 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
}
++next;
}
- detector.IncrementCounter(10);
+ tick_timer.Increment();
time += 10; // Increase time 10 ms.
}
}
@@ -83,7 +86,8 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
// 2, in order to raise the bar for delay peaks to inter-arrival times > 4.
// The delay pattern has peaks with delay = 3, thus should not trigger.
TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
const int kPacketSizeMs = 30;
detector.SetPacketAudioLength(kPacketSizeMs);
@@ -114,7 +118,7 @@ TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
++next;
}
- detector.IncrementCounter(10);
+ tick_timer.Increment();
time += 10; // Increase time 10 ms.
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
index 4188914c86c..32756650942 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
@@ -80,6 +80,22 @@ int DspHelper::RampSignal(int16_t* signal,
return RampSignal(signal, length, factor, increment, signal);
}
+int DspHelper::RampSignal(AudioVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment) {
+ int factor_q20 = (factor << 6) + 32;
+ // TODO(hlundin): Add 32 to factor_q20 when converting back to Q14?
+ for (size_t i = start_index; i < start_index + length; ++i) {
+ (*signal)[i] = (factor * (*signal)[i] + 8192) >> 14;
+ factor_q20 += increment;
+ factor_q20 = std::max(factor_q20, 0); // Never go negative.
+ factor = std::min(factor_q20 >> 6, 16384);
+ }
+ return factor;
+}
+
int DspHelper::RampSignal(AudioMultiVector* signal,
size_t start_index,
size_t length,
@@ -94,7 +110,7 @@ int DspHelper::RampSignal(AudioMultiVector* signal,
// Loop over the channels, starting at the same |factor| each time.
for (size_t channel = 0; channel < signal->Channels(); ++channel) {
end_factor =
- RampSignal(&(*signal)[channel][start_index], length, factor, increment);
+ RampSignal(&(*signal)[channel], start_index, length, factor, increment);
}
return end_factor;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
index 269c2eb0f25..23543fe383e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
@@ -67,6 +67,13 @@ class DspHelper {
// Same as above, but processes |length| samples from |signal|, starting at
// |start_index|.
+ static int RampSignal(AudioVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment);
+
+ // Same as above, but for an AudioMultiVector.
static int RampSignal(AudioMultiVector* signal,
size_t start_index,
size_t length,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
index ef7af46597e..963f4bdb6c0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
@@ -19,6 +19,7 @@
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/random_vector.h"
#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
@@ -111,25 +112,33 @@ int Expand::Process(AudioMultiVector* output) {
// Use only expand_vector0.
assert(expansion_vector_position + temp_length <=
parameters.expand_vector0.Size());
- memcpy(voiced_vector_storage,
- &parameters.expand_vector0[expansion_vector_position],
- sizeof(int16_t) * temp_length);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ voiced_vector_storage);
} else if (current_lag_index_ == 1) {
+ std::unique_ptr<int16_t[]> temp_0(new int16_t[temp_length]);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ temp_0.get());
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[temp_length]);
+ parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position,
+ temp_1.get());
// Mix 3/4 of expand_vector0 with 1/4 of expand_vector1.
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 3,
- &parameters.expand_vector1[expansion_vector_position], 1, 2,
- voiced_vector_storage, temp_length);
+ WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 3, temp_1.get(), 1, 2,
+ voiced_vector_storage, temp_length);
} else if (current_lag_index_ == 2) {
// Mix 1/2 of expand_vector0 with 1/2 of expand_vector1.
assert(expansion_vector_position + temp_length <=
parameters.expand_vector0.Size());
assert(expansion_vector_position + temp_length <=
parameters.expand_vector1.Size());
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 1,
- &parameters.expand_vector1[expansion_vector_position], 1, 1,
- voiced_vector_storage, temp_length);
+
+ std::unique_ptr<int16_t[]> temp_0(new int16_t[temp_length]);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ temp_0.get());
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[temp_length]);
+ parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position,
+ temp_1.get());
+ WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 1, temp_1.get(), 1, 1,
+ voiced_vector_storage, temp_length);
}
// Get tapering window parameters. Values are in Q15.
@@ -298,8 +307,7 @@ int Expand::Process(AudioMultiVector* output) {
} else {
assert(output->Size() == current_lag);
}
- memcpy(&(*output)[channel_ix][0], temp_data,
- sizeof(temp_data[0]) * current_lag);
+ (*output)[channel_ix].OverwriteAt(temp_data, current_lag, 0);
}
// Increase call number and cap it.
@@ -326,6 +334,17 @@ void Expand::SetParametersForMergeAfterExpand() {
stop_muting_ = true;
}
+bool Expand::Muted() const {
+ if (first_expand_ || stop_muting_)
+ return false;
+ RTC_DCHECK(channel_parameters_);
+ for (size_t ch = 0; ch < num_channels_; ++ch) {
+ if (channel_parameters_[ch].mute_factor != 0)
+ return false;
+ }
+ return true;
+}
+
size_t Expand::overlap_length() const {
return overlap_length_;
}
@@ -372,19 +391,20 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
size_t fs_mult_lpc_analysis_len = fs_mult * kLpcAnalysisLength;
const size_t signal_length = static_cast<size_t>(256 * fs_mult);
- const int16_t* audio_history =
- &(*sync_buffer_)[0][sync_buffer_->Size() - signal_length];
+
+ const size_t audio_history_position = sync_buffer_->Size() - signal_length;
+ std::unique_ptr<int16_t[]> audio_history(new int16_t[signal_length]);
+ (*sync_buffer_)[0].CopyTo(signal_length, audio_history_position,
+ audio_history.get());
// Initialize.
InitializeForAnExpandPeriod();
// Calculate correlation in downsampled domain (4 kHz sample rate).
- int correlation_scale;
size_t correlation_length = 51; // TODO(hlundin): Legacy bit-exactness.
// If it is decided to break bit-exactness |correlation_length| should be
// initialized to the return value of Correlation().
- Correlation(audio_history, signal_length, correlation_vector,
- &correlation_scale);
+ Correlation(audio_history.get(), signal_length, correlation_vector);
// Find peaks in correlation vector.
DspHelper::PeakDetection(correlation_vector, correlation_length,
@@ -455,7 +475,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
&audio_history[signal_length - correlation_length - start_index
- correlation_lags],
correlation_length + start_index + correlation_lags - 1);
- correlation_scale = (31 - WebRtcSpl_NormW32(signal_max * signal_max)) +
+ int correlation_scale = (31 - WebRtcSpl_NormW32(signal_max * signal_max)) +
(31 - WebRtcSpl_NormW32(static_cast<int32_t>(correlation_length))) - 31;
correlation_scale = std::max(0, correlation_scale);
@@ -541,12 +561,14 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
parameters.expand_vector1.Extend(
expansion_length - parameters.expand_vector1.Size());
}
- WebRtcSpl_AffineTransformVector(&parameters.expand_vector1[0],
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[expansion_length]);
+ WebRtcSpl_AffineTransformVector(temp_1.get(),
const_cast<int16_t*>(vector2),
amplitude_ratio,
4096,
13,
expansion_length);
+ parameters.expand_vector1.OverwriteAt(temp_1.get(), expansion_length, 0);
} else {
// Energy change constraint not fulfilled. Only use last vector.
parameters.expand_vector0.Clear();
@@ -582,13 +604,6 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
}
// Calculate the LPC and the gain of the filters.
- // Calculate scale value needed for auto-correlation.
- correlation_scale = WebRtcSpl_MaxAbsValueW16(
- &(audio_history[signal_length - fs_mult_lpc_analysis_len]),
- fs_mult_lpc_analysis_len);
-
- correlation_scale = std::min(16 - WebRtcSpl_NormW32(correlation_scale), 0);
- correlation_scale = std::max(correlation_scale * 2 + 7, 0);
// Calculate kUnvoicedLpcOrder + 1 lags of the auto-correlation function.
size_t temp_index = signal_length - fs_mult_lpc_analysis_len -
@@ -601,11 +616,9 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
memcpy(&temp_signal[kUnvoicedLpcOrder],
&audio_history[temp_index + kUnvoicedLpcOrder],
sizeof(int16_t) * fs_mult_lpc_analysis_len);
- WebRtcSpl_CrossCorrelation(auto_correlation,
- &temp_signal[kUnvoicedLpcOrder],
- &temp_signal[kUnvoicedLpcOrder],
- fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1,
- correlation_scale, -1);
+ CrossCorrelationWithAutoShift(
+ &temp_signal[kUnvoicedLpcOrder], &temp_signal[kUnvoicedLpcOrder],
+ fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1, -1, auto_correlation);
delete [] temp_signal;
// Verify that variance is positive.
@@ -766,8 +779,7 @@ Expand::ChannelParameters::ChannelParameters()
void Expand::Correlation(const int16_t* input,
size_t input_length,
- int16_t* output,
- int* output_scale) const {
+ int16_t* output) const {
// Set parameters depending on sample rate.
const int16_t* filter_coefficients;
size_t num_coefficients;
@@ -814,13 +826,11 @@ void Expand::Correlation(const int16_t* input,
downsampled_input, norm_shift);
int32_t correlation[kNumCorrelationLags];
- static const int kCorrelationShift = 6;
- WebRtcSpl_CrossCorrelation(
- correlation,
+ CrossCorrelationWithAutoShift(
&downsampled_input[kDownsampledLength - kCorrelationLength],
&downsampled_input[kDownsampledLength - kCorrelationLength
- kCorrelationStartLag],
- kCorrelationLength, kNumCorrelationLags, kCorrelationShift, -1);
+ kCorrelationLength, kNumCorrelationLags, -1, correlation);
// Normalize and move data from 32-bit to 16-bit vector.
int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
@@ -829,8 +839,6 @@ void Expand::Correlation(const int16_t* input,
std::max(18 - WebRtcSpl_NormW32(max_correlation), 0));
WebRtcSpl_VectorBitShiftW32ToW16(output, kNumCorrelationLags, correlation,
norm_shift2);
- // Total scale factor (right shifts) of correlation value.
- *output_scale = 2 * norm_shift + kCorrelationShift + norm_shift2;
}
void Expand::UpdateLagIndex() {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
index 7f61bf3b18c..0feba3693a1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
@@ -62,6 +62,10 @@ class Expand {
return channel_parameters_[channel].mute_factor;
}
+ // Returns true if expansion has been faded down to zero amplitude (for all
+ // channels); false otherwise.
+ bool Muted() const;
+
// Accessors and mutators.
virtual size_t overlap_length() const;
size_t max_lag() const { return max_lag_; }
@@ -120,12 +124,10 @@ class Expand {
// Calculate the auto-correlation of |input|, with length |input_length|
// samples. The correlation is calculated from a downsampled version of
- // |input|, and is written to |output|. The scale factor is written to
- // |output_scale|.
+ // |input|, and is written to |output|.
void Correlation(const int16_t* input,
size_t input_length,
- int16_t* output,
- int* output_scale) const;
+ int16_t* output) const;
void UpdateLagIndex();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
index 1441704102d..f19487ab17d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
@@ -93,8 +93,9 @@ class ExpandTest : public ::testing::Test {
ASSERT_TRUE(input_file_.Seek(speech_start_samples));
// Pre-load the sync buffer with speech data.
- ASSERT_TRUE(
- input_file_.Read(sync_buffer_.Size(), &sync_buffer_.Channel(0)[0]));
+ std::unique_ptr<int16_t[]> temp(new int16_t[sync_buffer_.Size()]);
+ ASSERT_TRUE(input_file_.Read(sync_buffer_.Size(), temp.get()));
+ sync_buffer_.Channel(0).OverwriteAt(temp.get(), sync_buffer_.Size(), 0);
ASSERT_EQ(1u, num_channels_) << "Fix: Must populate all channels.";
}
@@ -169,6 +170,37 @@ TEST_F(ExpandTest, CheckOutageStatsAfterReset) {
statistics_.last_outage_duration_ms());
}
+namespace {
+// Runs expand until Muted() returns true. Times out after 1000 calls.
+void ExpandUntilMuted(size_t num_channels, Expand* expand) {
+ EXPECT_FALSE(expand->Muted()) << "Instance is muted from the start";
+ AudioMultiVector output(num_channels);
+ int num_calls = 0;
+ while (!expand->Muted()) {
+ ASSERT_LT(num_calls++, 1000) << "Test timed out";
+ EXPECT_EQ(0, expand->Process(&output));
+ }
+}
+} // namespace
+
+// Verifies that Muted() returns true after a long expand period. Also verifies
+// that Muted() is reset to false after calling Reset(),
+// SetParametersForMergeAfterExpand() and SetParametersForNormalAfterExpand().
+TEST_F(ExpandTest, Muted) {
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.Reset();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.SetParametersForMergeAfterExpand();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+
+ expand_.Reset(); // Must reset in order to start a new expand period.
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.SetParametersForNormalAfterExpand();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+}
+
// TODO(hlundin): Write more tests.
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
index 89b0c543244..3a9de1d2606 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
@@ -93,6 +93,7 @@ class NetEq {
BackgroundNoiseMode background_noise_mode;
NetEqPlayoutMode playout_mode;
bool enable_fast_accelerate;
+ bool enable_muted_state = false;
};
enum ReturnCodes {
@@ -161,8 +162,12 @@ class NetEq {
// |num_channels_|, |sample_rate_hz_|, |samples_per_channel_|, and
// |vad_activity_| are updated upon success. If an error is returned, some
// fields may not have been updated.
+ // If muted state is enabled (through Config::enable_muted_state), |muted|
+ // may be set to true after a prolonged expand period. When this happens, the
+ // |data_| in |audio_frame| is not written, but should be interpreted as being
+ // all zeros.
// Returns kOK on success, or kFail in case of an error.
- virtual int GetAudio(AudioFrame* audio_frame) = 0;
+ virtual int GetAudio(AudioFrame* audio_frame, bool* muted) = 0;
// Associates |rtp_payload_type| with |codec| and |codec_name|, and stores the
// information in the codec database. Returns 0 on success, -1 on failure.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
index 9aed91f7887..299682f60d4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
@@ -18,6 +18,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
@@ -38,6 +39,8 @@ Merge::Merge(int fs_hz,
assert(num_channels_ > 0);
}
+Merge::~Merge() = default;
+
size_t Merge::Process(int16_t* input, size_t input_length,
int16_t* external_mute_factor_array,
AudioMultiVector* output) {
@@ -60,13 +63,16 @@ size_t Merge::Process(int16_t* input, size_t input_length,
size_t best_correlation_index = 0;
size_t output_length = 0;
+ std::unique_ptr<int16_t[]> input_channel(
+ new int16_t[input_length_per_channel]);
+ std::unique_ptr<int16_t[]> expanded_channel(new int16_t[expanded_length]);
for (size_t channel = 0; channel < num_channels_; ++channel) {
- int16_t* input_channel = &input_vector[channel][0];
- int16_t* expanded_channel = &expanded_[channel][0];
- int16_t expanded_max, input_max;
+ input_vector[channel].CopyTo(
+ input_length_per_channel, 0, input_channel.get());
+ expanded_[channel].CopyTo(expanded_length, 0, expanded_channel.get());
+
int16_t new_mute_factor = SignalScaling(
- input_channel, input_length_per_channel, expanded_channel,
- &expanded_max, &input_max);
+ input_channel.get(), input_length_per_channel, expanded_channel.get());
// Adjust muting factor (product of "main" muting factor and expand muting
// factor).
@@ -84,18 +90,16 @@ size_t Merge::Process(int16_t* input, size_t input_length,
// Downsample, correlate, and find strongest correlation period for the
// master (i.e., first) channel only.
// Downsample to 4kHz sample rate.
- Downsample(input_channel, input_length_per_channel, expanded_channel,
- expanded_length);
+ Downsample(input_channel.get(), input_length_per_channel,
+ expanded_channel.get(), expanded_length);
// Calculate the lag of the strongest correlation period.
best_correlation_index = CorrelateAndPeakSearch(
- expanded_max, input_max, old_length,
- input_length_per_channel, expand_period);
+ old_length, input_length_per_channel, expand_period);
}
- static const int kTempDataSize = 3600;
- int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
- int16_t* decoded_output = temp_data + best_correlation_index;
+ temp_data_.resize(input_length_per_channel + best_correlation_index);
+ int16_t* decoded_output = temp_data_.data() + best_correlation_index;
// Mute the new decoded data if needed (and unmute it linearly).
// This is the overlapping part of expanded_signal.
@@ -109,7 +113,7 @@ size_t Merge::Process(int16_t* input, size_t input_length,
// and so on.
int increment = 4194 / fs_mult_;
*external_mute_factor =
- static_cast<int16_t>(DspHelper::RampSignal(input_channel,
+ static_cast<int16_t>(DspHelper::RampSignal(input_channel.get(),
interpolation_length,
*external_mute_factor,
increment));
@@ -129,10 +133,10 @@ size_t Merge::Process(int16_t* input, size_t input_length,
int16_t increment =
static_cast<int16_t>(16384 / (interpolation_length + 1)); // In Q14.
int16_t mute_factor = 16384 - increment;
- memmove(temp_data, expanded_channel,
+ memmove(temp_data_.data(), expanded_channel.get(),
sizeof(int16_t) * best_correlation_index);
DspHelper::CrossFade(&expanded_channel[best_correlation_index],
- input_channel, interpolation_length,
+ input_channel.get(), interpolation_length,
&mute_factor, increment, decoded_output);
output_length = best_correlation_index + input_length_per_channel;
@@ -142,8 +146,7 @@ size_t Merge::Process(int16_t* input, size_t input_length,
} else {
assert(output->Size() == output_length);
}
- memcpy(&(*output)[channel][0], temp_data,
- sizeof(temp_data[0]) * output_length);
+ (*output)[channel].OverwriteAt(temp_data_.data(), output_length, 0);
}
// Copy back the first part of the data to |sync_buffer_| and remove it from
@@ -204,29 +207,26 @@ size_t Merge::GetExpandedSignal(size_t* old_length, size_t* expand_period) {
}
int16_t Merge::SignalScaling(const int16_t* input, size_t input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const {
+ const int16_t* expanded_signal) const {
// Adjust muting factor if new vector is more or less of the BGN energy.
const size_t mod_input_length =
std::min(static_cast<size_t>(64 * fs_mult_), input_length);
- *expanded_max = WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
- *input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
-
- // Calculate energy of expanded signal.
- // |log_fs_mult| is log2(fs_mult_), but is not exact for 48000 Hz.
- int log_fs_mult = 30 - WebRtcSpl_NormW32(fs_mult_);
- int expanded_shift = 6 + log_fs_mult
- - WebRtcSpl_NormW32(*expanded_max * *expanded_max);
- expanded_shift = std::max(expanded_shift, 0);
+ const int16_t expanded_max =
+ WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
+ int32_t factor = (expanded_max * expanded_max) /
+ (std::numeric_limits<int32_t>::max() /
+ static_cast<int32_t>(mod_input_length));
+ const int expanded_shift = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
int32_t energy_expanded = WebRtcSpl_DotProductWithScale(expanded_signal,
expanded_signal,
mod_input_length,
expanded_shift);
// Calculate energy of input signal.
- int input_shift = 6 + log_fs_mult -
- WebRtcSpl_NormW32(*input_max * *input_max);
- input_shift = std::max(input_shift, 0);
+ const int16_t input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
+ factor = (input_max * input_max) / (std::numeric_limits<int32_t>::max() /
+ static_cast<int32_t>(mod_input_length));
+ const int input_shift = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
int32_t energy_input = WebRtcSpl_DotProductWithScale(input, input,
mod_input_length,
input_shift);
@@ -307,22 +307,17 @@ void Merge::Downsample(const int16_t* input, size_t input_length,
}
}
-size_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- size_t start_position, size_t input_length,
+size_t Merge::CorrelateAndPeakSearch(size_t start_position, size_t input_length,
size_t expand_period) const {
// Calculate correlation without any normalization.
const size_t max_corr_length = kMaxCorrelationLength;
size_t stop_position_downsamp =
std::min(max_corr_length, expand_->max_lag() / (fs_mult_ * 2) + 1);
- int correlation_shift = 0;
- if (expanded_max * input_max > 26843546) {
- correlation_shift = 3;
- }
int32_t correlation[kMaxCorrelationLength];
- WebRtcSpl_CrossCorrelation(correlation, input_downsampled_,
- expanded_downsampled_, kInputDownsampLength,
- stop_position_downsamp, correlation_shift, 1);
+ CrossCorrelationWithAutoShift(input_downsampled_, expanded_downsampled_,
+ kInputDownsampLength, stop_position_downsamp, 1,
+ correlation);
// Normalize correlation to 14 bits and copy to a 16-bit array.
const size_t pad_length = expand_->overlap_length() - 1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
index a168502c271..48f09a16727 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
@@ -37,7 +37,7 @@ class Merge {
size_t num_channels,
Expand* expand,
SyncBuffer* sync_buffer);
- virtual ~Merge() {}
+ virtual ~Merge();
// The main method to produce the audio data. The decoded data is supplied in
// |input|, having |input_length| samples in total for all channels
@@ -69,11 +69,10 @@ class Merge {
// of samples that were taken from the |sync_buffer_|.
size_t GetExpandedSignal(size_t* old_length, size_t* expand_period);
- // Analyzes |input| and |expanded_signal| to find maximum values. Returns
- // a muting factor (Q14) to be used on the new data.
+ // Analyzes |input| and |expanded_signal| and returns muting factor (Q14) to
+ // be used on the new data.
int16_t SignalScaling(const int16_t* input, size_t input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const;
+ const int16_t* expanded_signal) const;
// Downsamples |input| (|input_length| samples) and |expanded_signal| to
// 4 kHz sample rate. The downsampled signals are written to
@@ -84,8 +83,7 @@ class Merge {
// Calculates cross-correlation between |input_downsampled_| and
// |expanded_downsampled_|, and finds the correlation maximum. The maximizing
// lag is returned.
- size_t CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- size_t start_position, size_t input_length,
+ size_t CorrelateAndPeakSearch(size_t start_position, size_t input_length,
size_t expand_period) const;
const int fs_mult_; // fs_hz_ / 8000.
@@ -95,6 +93,7 @@ class Merge {
int16_t expanded_downsampled_[kExpandDownsampLength];
int16_t input_downsampled_[kInputDownsampLength];
AudioMultiVector expanded_;
+ std::vector<int16_t> temp_data_;
RTC_DISALLOW_COPY_AND_ASSIGN(Merge);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
index 1b4a3c9da5b..60ae0f6501e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -21,6 +21,7 @@ namespace webrtc {
class MockDecoderDatabase : public DecoderDatabase {
public:
+ MockDecoderDatabase() : DecoderDatabase(nullptr) {}
virtual ~MockDecoderDatabase() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_CONST_METHOD0(Empty,
@@ -59,7 +60,7 @@ class MockDecoderDatabase : public DecoderDatabase {
MOCK_METHOD1(SetActiveCngDecoder,
int(uint8_t rtp_payload_type));
MOCK_METHOD0(GetActiveCngDecoder,
- AudioDecoder*());
+ ComfortNoiseDecoder*());
MOCK_CONST_METHOD1(CheckPayloadTypes,
int(const PacketList& packet_list));
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
index 6fb85854d77..7ceea70621f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
@@ -20,8 +20,9 @@ namespace webrtc {
class MockDelayManager : public DelayManager {
public:
MockDelayManager(size_t max_packets_in_buffer,
- DelayPeakDetector* peak_detector)
- : DelayManager(max_packets_in_buffer, peak_detector) {}
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer)
+ : DelayManager(max_packets_in_buffer, peak_detector, tick_timer) {}
virtual ~MockDelayManager() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_CONST_METHOD0(iat_vector,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
index fa5cd7ed061..5564fba312c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
@@ -19,15 +19,16 @@ namespace webrtc {
class MockDelayPeakDetector : public DelayPeakDetector {
public:
+ MockDelayPeakDetector(const TickTimer* tick_timer)
+ : DelayPeakDetector(tick_timer) {}
virtual ~MockDelayPeakDetector() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Reset, void());
MOCK_METHOD1(SetPacketAudioLength, void(int length_ms));
MOCK_METHOD0(peak_found, bool());
MOCK_CONST_METHOD0(MaxPeakHeight, int());
- MOCK_CONST_METHOD0(MaxPeakPeriod, int());
+ MOCK_CONST_METHOD0(MaxPeakPeriod, uint64_t());
MOCK_METHOD2(Update, bool(int inter_arrival_time, int target_level));
- MOCK_METHOD1(IncrementCounter, void(int inc_ms));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 97e54d83a5e..6bb95901d8c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -19,8 +19,8 @@ namespace webrtc {
class MockPacketBuffer : public PacketBuffer {
public:
- MockPacketBuffer(size_t max_number_of_packets)
- : PacketBuffer(max_number_of_packets) {}
+ MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer)
+ : PacketBuffer(max_number_of_packets, tick_timer) {}
virtual ~MockPacketBuffer() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Flush,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
index c31dbdc1a3c..2d1ce724cab 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
@@ -10,21 +10,10 @@
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include <memory>
#include <sstream>
-#include "webrtc/modules/audio_coding/neteq/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
-#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
-#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
namespace webrtc {
@@ -37,41 +26,16 @@ std::string NetEq::Config::ToString() const {
<< ", max_packets_in_buffer=" << max_packets_in_buffer
<< ", background_noise_mode=" << background_noise_mode
<< ", playout_mode=" << playout_mode
- << ", enable_fast_accelerate=" << enable_fast_accelerate;
+ << ", enable_fast_accelerate="
+ << (enable_fast_accelerate ? " true": "false")
+ << ", enable_muted_state=" << (enable_muted_state ? " true": "false");
return ss.str();
}
// Creates all classes needed and inject them into a new NetEqImpl object.
// Return the new object.
NetEq* NetEq::Create(const NetEq::Config& config) {
- BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
- DecoderDatabase* decoder_database = new DecoderDatabase;
- DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
- DelayManager* delay_manager =
- new DelayManager(config.max_packets_in_buffer, delay_peak_detector);
- delay_manager->SetMaximumDelay(config.max_delay_ms);
- DtmfBuffer* dtmf_buffer = new DtmfBuffer(config.sample_rate_hz);
- DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
- PacketBuffer* packet_buffer = new PacketBuffer(config.max_packets_in_buffer);
- PayloadSplitter* payload_splitter = new PayloadSplitter;
- TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
- AccelerateFactory* accelerate_factory = new AccelerateFactory;
- ExpandFactory* expand_factory = new ExpandFactory;
- PreemptiveExpandFactory* preemptive_expand_factory =
- new PreemptiveExpandFactory;
- return new NetEqImpl(config,
- buffer_level_filter,
- decoder_database,
- delay_manager,
- delay_peak_detector,
- dtmf_buffer,
- dtmf_tone_generator,
- packet_buffer,
- payload_splitter,
- timestamp_scaler,
- accelerate_factory,
- expand_factory,
- preemptive_expand_factory);
+ return new NetEqImpl(config, NetEqImpl::Dependencies(config));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
index ead9586f5ce..e92567eef5b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
@@ -51,6 +51,8 @@
'dependencies': [
'<@(neteq_dependencies)',
'<(webrtc_root)/common.gyp:webrtc_common',
+ 'builtin_audio_decoder_factory',
+ 'rent_a_codec',
],
'defines': [
'<@(neteq_defines)',
@@ -73,6 +75,8 @@
'buffer_level_filter.h',
'comfort_noise.cc',
'comfort_noise.h',
+ 'cross_correlation.cc',
+ 'cross_correlation.h',
'decision_logic.cc',
'decision_logic.h',
'decision_logic_fax.cc',
@@ -105,6 +109,8 @@
'statistics_calculator.h',
'normal.cc',
'normal.h',
+ 'packet.cc',
+ 'packet.h',
'packet_buffer.cc',
'packet_buffer.h',
'payload_splitter.cc',
@@ -119,6 +125,8 @@
'rtcp.h',
'sync_buffer.cc',
'sync_buffer.h',
+ 'tick_timer.cc',
+ 'tick_timer.h',
'timestamp_scaler.cc',
'timestamp_scaler.h',
'time_stretch.cc',
@@ -206,19 +214,6 @@
],
}, # neteq_unittest_tools
], # targets
- 'conditions': [
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_decoder_unittests_apk',
- ],
- },
- ],
- }],
- ],
}], # include_tests
], # conditions
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
index 50c24a3b73a..25fa1a7365c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -189,7 +189,9 @@ class NetEqExternalVsInternalDecoderTest : public NetEqExternalDecoderUnitTest,
void GetAndVerifyOutput() override {
// Get audio from internal decoder instance.
- EXPECT_EQ(NetEq::kOK, neteq_internal_->GetAudio(&output_internal_));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_internal_->GetAudio(&output_internal_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(1u, output_internal_.num_channels_);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
output_internal_.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index db37e716d66..7f8661bae89 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -14,6 +14,7 @@
#include <memory.h> // memset
#include <algorithm>
+#include <vector>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -21,6 +22,7 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
@@ -42,6 +44,7 @@
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "webrtc/modules/include/module_common_types.h"
@@ -52,33 +55,43 @@
namespace webrtc {
+NetEqImpl::Dependencies::Dependencies(const NetEq::Config& config)
+ : tick_timer(new TickTimer),
+ buffer_level_filter(new BufferLevelFilter),
+ decoder_database(new DecoderDatabase(CreateBuiltinAudioDecoderFactory())),
+ delay_peak_detector(new DelayPeakDetector(tick_timer.get())),
+ delay_manager(new DelayManager(config.max_packets_in_buffer,
+ delay_peak_detector.get(),
+ tick_timer.get())),
+ dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)),
+ dtmf_tone_generator(new DtmfToneGenerator),
+ packet_buffer(
+ new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())),
+ payload_splitter(new PayloadSplitter),
+ timestamp_scaler(new TimestampScaler(*decoder_database)),
+ accelerate_factory(new AccelerateFactory),
+ expand_factory(new ExpandFactory),
+ preemptive_expand_factory(new PreemptiveExpandFactory) {}
+
+NetEqImpl::Dependencies::~Dependencies() = default;
+
NetEqImpl::NetEqImpl(const NetEq::Config& config,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler,
- AccelerateFactory* accelerate_factory,
- ExpandFactory* expand_factory,
- PreemptiveExpandFactory* preemptive_expand_factory,
+ Dependencies&& deps,
bool create_components)
- : buffer_level_filter_(buffer_level_filter),
- decoder_database_(decoder_database),
- delay_manager_(delay_manager),
- delay_peak_detector_(delay_peak_detector),
- dtmf_buffer_(dtmf_buffer),
- dtmf_tone_generator_(dtmf_tone_generator),
- packet_buffer_(packet_buffer),
- payload_splitter_(payload_splitter),
- timestamp_scaler_(timestamp_scaler),
+ : tick_timer_(std::move(deps.tick_timer)),
+ buffer_level_filter_(std::move(deps.buffer_level_filter)),
+ decoder_database_(std::move(deps.decoder_database)),
+ delay_manager_(std::move(deps.delay_manager)),
+ delay_peak_detector_(std::move(deps.delay_peak_detector)),
+ dtmf_buffer_(std::move(deps.dtmf_buffer)),
+ dtmf_tone_generator_(std::move(deps.dtmf_tone_generator)),
+ packet_buffer_(std::move(deps.packet_buffer)),
+ payload_splitter_(std::move(deps.payload_splitter)),
+ timestamp_scaler_(std::move(deps.timestamp_scaler)),
vad_(new PostDecodeVad()),
- expand_factory_(expand_factory),
- accelerate_factory_(accelerate_factory),
- preemptive_expand_factory_(preemptive_expand_factory),
+ expand_factory_(std::move(deps.expand_factory)),
+ accelerate_factory_(std::move(deps.accelerate_factory)),
+ preemptive_expand_factory_(std::move(deps.preemptive_expand_factory)),
last_mode_(kModeNormal),
decoded_buffer_length_(kMaxFrameSize),
decoded_buffer_(new int16_t[decoded_buffer_length_]),
@@ -95,7 +108,8 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
background_noise_mode_(config.background_noise_mode),
playout_mode_(config.playout_mode),
enable_fast_accelerate_(config.enable_fast_accelerate),
- nack_enabled_(false) {
+ nack_enabled_(false),
+ enable_muted_state_(config.enable_muted_state) {
LOG(LS_INFO) << "NetEq config: " << config.ToString();
int fs = config.sample_rate_hz;
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
@@ -103,6 +117,7 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
"Changing to 8000 Hz.";
fs = 8000;
}
+ delay_manager_->SetMaximumDelay(config.max_delay_ms);
fs_hz_ = fs;
fs_mult_ = fs / 8000;
last_output_sample_rate_hz_ = fs;
@@ -191,10 +206,10 @@ void SetAudioFrameActivityAndType(bool vad_enabled,
}
} // namespace
-int NetEqImpl::GetAudio(AudioFrame* audio_frame) {
+int NetEqImpl::GetAudio(AudioFrame* audio_frame, bool* muted) {
TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
rtc::CritScope lock(&crit_sect_);
- int error = GetAudioInternal(audio_frame);
+ int error = GetAudioInternal(audio_frame, muted);
RTC_DCHECK_EQ(
audio_frame->sample_rate_hz_,
rtc::checked_cast<int>(audio_frame->samples_per_channel_ * 100));
@@ -487,6 +502,11 @@ const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
return sync_buffer_.get();
}
+Operations NetEqImpl::last_operation_for_test() const {
+ rtc::CritScope lock(&crit_sect_);
+ return last_operation_;
+}
+
// Methods below this line are private.
int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
@@ -532,7 +552,8 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
packet->header.numCSRCs = 0;
packet->payload_length = payload.size();
packet->primary = true;
- packet->waiting_time = 0;
+ // Waiting time will be set upon inserting the packet in the buffer.
+ RTC_DCHECK(!packet->waiting_time);
packet->payload = new uint8_t[packet->payload_length];
packet->sync_packet = is_sync_packet;
if (!packet->payload) {
@@ -664,13 +685,15 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
}
}
- // Update bandwidth estimate, if the packet is not sync-packet.
- if (!packet_list.empty() && !packet_list.front()->sync_packet) {
+ // Update bandwidth estimate, if the packet is not sync-packet nor comfort
+ // noise.
+ if (!packet_list.empty() && !packet_list.front()->sync_packet &&
+ !decoder_database_->IsComfortNoise(main_header.payloadType)) {
// The list can be empty here if we got nothing but DTMF payloads.
AudioDecoder* decoder =
decoder_database_->GetDecoder(main_header.payloadType);
assert(decoder); // Should always get a valid object, since we have
- // already checked that the payload types are known.
+ // already checked that the payload types are known.
decoder->IncomingPacket(packet_list.front()->payload,
packet_list.front()->payload_length,
packet_list.front()->header.sequenceNumber,
@@ -728,14 +751,18 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
assert(rtp_header);
int payload_type = rtp_header->payloadType;
- AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
- assert(decoder); // Payloads are already checked to be valid.
+ size_t channels = 1;
+ if (!decoder_database_->IsComfortNoise(payload_type)) {
+ AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
+ assert(decoder); // Payloads are already checked to be valid.
+ channels = decoder->Channels();
+ }
const DecoderDatabase::DecoderInfo* decoder_info =
decoder_database_->GetDecoderInfo(payload_type);
assert(decoder_info);
if (decoder_info->fs_hz != fs_hz_ ||
- decoder->Channels() != algorithm_buffer_->Channels()) {
- SetSampleRateAndChannels(decoder_info->fs_hz, decoder->Channels());
+ channels != algorithm_buffer_->Channels()) {
+ SetSampleRateAndChannels(decoder_info->fs_hz, channels);
}
if (nack_enabled_) {
RTC_DCHECK(nack_);
@@ -783,11 +810,32 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
return 0;
}
-int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
+int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, bool* muted) {
PacketList packet_list;
DtmfEvent dtmf_event;
Operations operation;
bool play_dtmf;
+ *muted = false;
+ tick_timer_->Increment();
+ stats_.IncreaseCounter(output_size_samples_, fs_hz_);
+
+ // Check for muted state.
+ if (enable_muted_state_ && expand_->Muted() && packet_buffer_->Empty()) {
+ RTC_DCHECK_EQ(last_mode_, kModeExpand);
+ playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
+ audio_frame->sample_rate_hz_ = fs_hz_;
+ audio_frame->samples_per_channel_ = output_size_samples_;
+ audio_frame->timestamp_ =
+ first_packet_
+ ? 0
+ : timestamp_scaler_->ToExternal(playout_timestamp_) -
+ static_cast<uint32_t>(audio_frame->samples_per_channel_);
+ audio_frame->num_channels_ = sync_buffer_->Channels();
+ stats_.ExpandedNoiseSamples(output_size_samples_);
+ *muted = true;
+ return 0;
+ }
+
int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
&play_dtmf);
if (return_value != 0) {
@@ -806,6 +854,11 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
vad_->Update(decoded_buffer_.get(), static_cast<size_t>(length), speech_type,
sid_frame_available, fs_hz_);
+ if (sid_frame_available || speech_type == AudioDecoder::kComfortNoise) {
+ // Start a new stopwatch since we are decoding a new CNG packet.
+ generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
+ }
+
algorithm_buffer_->Clear();
switch (operation) {
case kNormal: {
@@ -884,6 +937,7 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
return kInvalidOperation;
}
} // End of switch.
+ last_operation_ = operation;
if (return_value < 0) {
return return_value;
}
@@ -978,6 +1032,12 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
: timestamp_scaler_->ToExternal(playout_timestamp_) -
static_cast<uint32_t>(audio_frame->samples_per_channel_);
+ if (!(last_mode_ == kModeRfc3389Cng ||
+ last_mode_ == kModeCodecInternalCng ||
+ last_mode_ == kModeExpand)) {
+ generated_noise_stopwatch_.reset();
+ }
+
if (decode_return_value) return decode_return_value;
return return_value;
}
@@ -990,10 +1050,6 @@ int NetEqImpl::GetDecision(Operations* operation,
*play_dtmf = false;
*operation = kUndefined;
- // Increment time counters.
- packet_buffer_->IncrementWaitingTimes();
- stats_.IncreaseCounter(output_size_samples_, fs_hz_);
-
assert(sync_buffer_.get());
uint32_t end_timestamp = sync_buffer_->end_timestamp();
if (!new_codec_) {
@@ -1002,14 +1058,22 @@ int NetEqImpl::GetDecision(Operations* operation,
}
const RTPHeader* header = packet_buffer_->NextRtpHeader();
+ RTC_DCHECK(!generated_noise_stopwatch_ ||
+ generated_noise_stopwatch_->ElapsedTicks() >= 1);
+ uint64_t generated_noise_samples =
+ generated_noise_stopwatch_
+ ? (generated_noise_stopwatch_->ElapsedTicks() - 1) *
+ output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+
if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
// Because of timestamp peculiarities, we have to "manually" disallow using
// a CNG packet with the same timestamp as the one that was last played.
// This can happen when using redundancy and will cause the timing to shift.
while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
(end_timestamp >= header->timestamp ||
- end_timestamp + decision_logic_->generated_noise_samples() >
- header->timestamp)) {
+ end_timestamp + generated_noise_samples > header->timestamp)) {
// Don't use this packet, discard it.
if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
assert(false); // Must be ok by design.
@@ -1037,7 +1101,7 @@ int NetEqImpl::GetDecision(Operations* operation,
// Check if it is time to play a DTMF event.
if (dtmf_buffer_->GetEvent(
static_cast<uint32_t>(
- end_timestamp + decision_logic_->generated_noise_samples()),
+ end_timestamp + generated_noise_samples),
dtmf_event)) {
*play_dtmf = true;
}
@@ -1045,13 +1109,14 @@ int NetEqImpl::GetDecision(Operations* operation,
// Get instruction.
assert(sync_buffer_.get());
assert(expand_.get());
- *operation = decision_logic_->GetDecision(*sync_buffer_,
- *expand_,
- decoder_frame_length_,
- header,
- last_mode_,
- *play_dtmf,
- &reset_decoder_);
+ generated_noise_samples =
+ generated_noise_stopwatch_
+ ? generated_noise_stopwatch_->ElapsedTicks() * output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+ *operation = decision_logic_->GetDecision(
+ *sync_buffer_, *expand_, decoder_frame_length_, header, last_mode_,
+ *play_dtmf, generated_noise_samples, &reset_decoder_);
// Check if we already have enough samples in the |sync_buffer_|. If so,
// change decision to normal, unless the decision was merge, accelerate, or
@@ -1124,15 +1189,19 @@ int NetEqImpl::GetDecision(Operations* operation,
// TODO(hlundin): Write test for this.
// Update timestamp.
timestamp_ = end_timestamp;
- if (decision_logic_->generated_noise_samples() > 0 &&
- last_mode_ != kModeDtmf) {
+ const uint64_t generated_noise_samples =
+ generated_noise_stopwatch_
+ ? generated_noise_stopwatch_->ElapsedTicks() *
+ output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+ if (generated_noise_samples > 0 && last_mode_ != kModeDtmf) {
// Make a jump in timestamp due to the recently played comfort noise.
uint32_t timestamp_jump =
- static_cast<uint32_t>(decision_logic_->generated_noise_samples());
+ static_cast<uint32_t>(generated_noise_samples);
sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
timestamp_ += timestamp_jump;
}
- decision_logic_->set_generated_noise_samples(0);
return 0;
}
case kAccelerate:
@@ -1215,9 +1284,6 @@ int NetEqImpl::GetDecision(Operations* operation,
// We are about to decode and use a non-CNG packet.
decision_logic_->SetCngOff();
}
- // Reset CNG timestamp as a new packet will be delivered.
- // (Also if this is a CNG packet, since playedOutTS is updated.)
- decision_logic_->set_generated_noise_samples(0);
extracted_samples = ExtractPackets(required_samples, packet_list);
if (extracted_samples < 0) {
@@ -1297,7 +1363,7 @@ int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
decoder->Reset();
// Reset comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder)
cng_decoder->Reset();
@@ -1550,6 +1616,12 @@ int NetEqImpl::DoExpand(bool play_dtmf) {
if (!play_dtmf) {
dtmf_tone_generator_->Reset();
}
+
+ if (!generated_noise_stopwatch_) {
+ // Start a new stopwatch since we may be covering for a lost CNG packet.
+ generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
+ }
+
return 0;
}
@@ -1920,8 +1992,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
return -1;
}
stats_.PacketsDiscarded(discard_count);
- // Store waiting time in ms; packets->waiting_time is in "output blocks".
- stats_.StoreWaitingTime(packet->waiting_time * kOutputSizeMs);
+ stats_.StoreWaitingTime(packet->waiting_time->ElapsedMs());
assert(packet->payload_length > 0);
packet_list->push_back(packet); // Store packet in list.
@@ -1955,7 +2026,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
stats_.SecondaryDecodedSamples(packet_duration);
}
}
- } else {
+ } else if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
LOG(LS_WARNING) << "Unknown payload type "
<< static_cast<int>(packet->header.payloadType);
assert(false);
@@ -2023,7 +2094,7 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
mute_factor_array_[i] = 16384; // 1.0 in Q14.
}
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder)
cng_decoder->Reset();
@@ -2094,11 +2165,9 @@ NetEqImpl::OutputType NetEqImpl::LastOutputType() {
}
void NetEqImpl::CreateDecisionLogic() {
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- playout_mode_,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
+ decision_logic_.reset(DecisionLogic::Create(
+ fs_hz_, output_size_samples_, playout_mode_, decoder_database_.get(),
+ *packet_buffer_.get(), delay_manager_.get(), buffer_level_filter_.get(),
+ tick_timer_.get()));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
index 75055a7b47f..cc5550411f2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -24,6 +24,7 @@
#include "webrtc/modules/audio_coding/neteq/random_vector.h"
#include "webrtc/modules/audio_coding/neteq/rtcp.h"
#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -65,21 +66,33 @@ class NetEqImpl : public webrtc::NetEq {
kVadPassive
};
- // Creates a new NetEqImpl object. The object will assume ownership of all
- // injected dependencies, and will delete them when done.
+ struct Dependencies {
+ // The constructor populates the Dependencies struct with the default
+ // implementations of the objects. They can all be replaced by the user
+ // before sending the struct to the NetEqImpl constructor. However, there
+ // are dependencies between some of the classes inside the struct, so
+ // swapping out one may make it necessary to re-create another one.
+ explicit Dependencies(const NetEq::Config& config);
+ ~Dependencies();
+
+ std::unique_ptr<TickTimer> tick_timer;
+ std::unique_ptr<BufferLevelFilter> buffer_level_filter;
+ std::unique_ptr<DecoderDatabase> decoder_database;
+ std::unique_ptr<DelayPeakDetector> delay_peak_detector;
+ std::unique_ptr<DelayManager> delay_manager;
+ std::unique_ptr<DtmfBuffer> dtmf_buffer;
+ std::unique_ptr<DtmfToneGenerator> dtmf_tone_generator;
+ std::unique_ptr<PacketBuffer> packet_buffer;
+ std::unique_ptr<PayloadSplitter> payload_splitter;
+ std::unique_ptr<TimestampScaler> timestamp_scaler;
+ std::unique_ptr<AccelerateFactory> accelerate_factory;
+ std::unique_ptr<ExpandFactory> expand_factory;
+ std::unique_ptr<PreemptiveExpandFactory> preemptive_expand_factory;
+ };
+
+ // Creates a new NetEqImpl object.
NetEqImpl(const NetEq::Config& config,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler,
- AccelerateFactory* accelerate_factory,
- ExpandFactory* expand_factory,
- PreemptiveExpandFactory* preemptive_expand_factory,
+ Dependencies&& deps,
bool create_components = true);
~NetEqImpl() override;
@@ -104,7 +117,7 @@ class NetEqImpl : public webrtc::NetEq {
int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
uint32_t receive_timestamp) override;
- int GetAudio(AudioFrame* audio_frame) override;
+ int GetAudio(AudioFrame* audio_frame, bool* muted) override;
int RegisterPayloadType(NetEqDecoder codec,
const std::string& codec_name,
@@ -191,12 +204,15 @@ class NetEqImpl : public webrtc::NetEq {
// This accessor method is only intended for testing purposes.
const SyncBuffer* sync_buffer_for_test() const;
+ Operations last_operation_for_test() const;
protected:
static const int kOutputSizeMs = 10;
- static const size_t kMaxFrameSize = 2880; // 60 ms @ 48 kHz.
+ static const size_t kMaxFrameSize = 5760; // 120 ms @ 48 kHz.
// TODO(hlundin): Provide a better value for kSyncBufferSize.
- static const size_t kSyncBufferSize = 2 * kMaxFrameSize;
+ // Current value is kMaxFrameSize + 60 ms * 48 kHz, which is enough for
+ // calculating correlations of current frame against history.
+ static const size_t kSyncBufferSize = kMaxFrameSize + 60 * 48;
// Inserts a new packet into NetEq. This is used by the InsertPacket method
// above. Returns 0 on success, otherwise an error code.
@@ -209,7 +225,7 @@ class NetEqImpl : public webrtc::NetEq {
// Delivers 10 ms of audio data. The data is written to |audio_frame|.
// Returns 0 on success, otherwise an error code.
- int GetAudioInternal(AudioFrame* audio_frame)
+ int GetAudioInternal(AudioFrame* audio_frame, bool* muted)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Provides a decision to the GetAudioInternal method. The decision what to
@@ -328,6 +344,7 @@ class NetEqImpl : public webrtc::NetEq {
virtual void CreateDecisionLogic() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
rtc::CriticalSection crit_sect_;
+ const std::unique_ptr<TickTimer> tick_timer_ GUARDED_BY(crit_sect_);
const std::unique_ptr<BufferLevelFilter> buffer_level_filter_
GUARDED_BY(crit_sect_);
const std::unique_ptr<DecoderDatabase> decoder_database_
@@ -369,6 +386,7 @@ class NetEqImpl : public webrtc::NetEq {
size_t output_size_samples_ GUARDED_BY(crit_sect_);
size_t decoder_frame_length_ GUARDED_BY(crit_sect_);
Modes last_mode_ GUARDED_BY(crit_sect_);
+ Operations last_operation_ GUARDED_BY(crit_sect_);
std::unique_ptr<int16_t[]> mute_factor_array_ GUARDED_BY(crit_sect_);
size_t decoded_buffer_length_ GUARDED_BY(crit_sect_);
std::unique_ptr<int16_t[]> decoded_buffer_ GUARDED_BY(crit_sect_);
@@ -387,8 +405,11 @@ class NetEqImpl : public webrtc::NetEq {
bool enable_fast_accelerate_ GUARDED_BY(crit_sect_);
std::unique_ptr<Nack> nack_ GUARDED_BY(crit_sect_);
bool nack_enabled_ GUARDED_BY(crit_sect_);
+ const bool enable_muted_state_ GUARDED_BY(crit_sect_);
AudioFrame::VADActivity last_vad_activity_ GUARDED_BY(crit_sect_) =
AudioFrame::kVadPassive;
+ std::unique_ptr<TickTimer::Stopwatch> generated_noise_stopwatch_
+ GUARDED_BY(crit_sect_);
private:
RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
index 561c0459bfb..43db87f4fa7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
@@ -54,111 +56,82 @@ int DeletePacketsAndReturnOk(PacketList* packet_list) {
class NetEqImplTest : public ::testing::Test {
protected:
- NetEqImplTest()
- : neteq_(NULL),
- config_(),
- mock_buffer_level_filter_(NULL),
- buffer_level_filter_(NULL),
- use_mock_buffer_level_filter_(true),
- mock_decoder_database_(NULL),
- decoder_database_(NULL),
- use_mock_decoder_database_(true),
- mock_delay_peak_detector_(NULL),
- delay_peak_detector_(NULL),
- use_mock_delay_peak_detector_(true),
- mock_delay_manager_(NULL),
- delay_manager_(NULL),
- use_mock_delay_manager_(true),
- mock_dtmf_buffer_(NULL),
- dtmf_buffer_(NULL),
- use_mock_dtmf_buffer_(true),
- mock_dtmf_tone_generator_(NULL),
- dtmf_tone_generator_(NULL),
- use_mock_dtmf_tone_generator_(true),
- mock_packet_buffer_(NULL),
- packet_buffer_(NULL),
- use_mock_packet_buffer_(true),
- mock_payload_splitter_(NULL),
- payload_splitter_(NULL),
- use_mock_payload_splitter_(true),
- timestamp_scaler_(NULL) {
- config_.sample_rate_hz = 8000;
- }
+ NetEqImplTest() { config_.sample_rate_hz = 8000; }
void CreateInstance() {
+ NetEqImpl::Dependencies deps(config_);
+
+ // Get a local pointer to NetEq's TickTimer object.
+ tick_timer_ = deps.tick_timer.get();
+
if (use_mock_buffer_level_filter_) {
- mock_buffer_level_filter_ = new MockBufferLevelFilter;
- buffer_level_filter_ = mock_buffer_level_filter_;
- } else {
- buffer_level_filter_ = new BufferLevelFilter;
+ std::unique_ptr<MockBufferLevelFilter> mock(new MockBufferLevelFilter);
+ mock_buffer_level_filter_ = mock.get();
+ deps.buffer_level_filter = std::move(mock);
}
+ buffer_level_filter_ = deps.buffer_level_filter.get();
+
if (use_mock_decoder_database_) {
- mock_decoder_database_ = new MockDecoderDatabase;
+ std::unique_ptr<MockDecoderDatabase> mock(new MockDecoderDatabase);
+ mock_decoder_database_ = mock.get();
EXPECT_CALL(*mock_decoder_database_, GetActiveCngDecoder())
.WillOnce(ReturnNull());
- decoder_database_ = mock_decoder_database_;
- } else {
- decoder_database_ = new DecoderDatabase;
+ deps.decoder_database = std::move(mock);
}
+ decoder_database_ = deps.decoder_database.get();
+
if (use_mock_delay_peak_detector_) {
- mock_delay_peak_detector_ = new MockDelayPeakDetector;
+ std::unique_ptr<MockDelayPeakDetector> mock(
+ new MockDelayPeakDetector(tick_timer_));
+ mock_delay_peak_detector_ = mock.get();
EXPECT_CALL(*mock_delay_peak_detector_, Reset()).Times(1);
- delay_peak_detector_ = mock_delay_peak_detector_;
- } else {
- delay_peak_detector_ = new DelayPeakDetector;
+ deps.delay_peak_detector = std::move(mock);
}
+ delay_peak_detector_ = deps.delay_peak_detector.get();
+
if (use_mock_delay_manager_) {
- mock_delay_manager_ = new MockDelayManager(config_.max_packets_in_buffer,
- delay_peak_detector_);
+ std::unique_ptr<MockDelayManager> mock(new MockDelayManager(
+ config_.max_packets_in_buffer, delay_peak_detector_, tick_timer_));
+ mock_delay_manager_ = mock.get();
EXPECT_CALL(*mock_delay_manager_, set_streaming_mode(false)).Times(1);
- delay_manager_ = mock_delay_manager_;
- } else {
- delay_manager_ =
- new DelayManager(config_.max_packets_in_buffer, delay_peak_detector_);
+ deps.delay_manager = std::move(mock);
}
+ delay_manager_ = deps.delay_manager.get();
+
if (use_mock_dtmf_buffer_) {
- mock_dtmf_buffer_ = new MockDtmfBuffer(config_.sample_rate_hz);
- dtmf_buffer_ = mock_dtmf_buffer_;
- } else {
- dtmf_buffer_ = new DtmfBuffer(config_.sample_rate_hz);
+ std::unique_ptr<MockDtmfBuffer> mock(
+ new MockDtmfBuffer(config_.sample_rate_hz));
+ mock_dtmf_buffer_ = mock.get();
+ deps.dtmf_buffer = std::move(mock);
}
+ dtmf_buffer_ = deps.dtmf_buffer.get();
+
if (use_mock_dtmf_tone_generator_) {
- mock_dtmf_tone_generator_ = new MockDtmfToneGenerator;
- dtmf_tone_generator_ = mock_dtmf_tone_generator_;
- } else {
- dtmf_tone_generator_ = new DtmfToneGenerator;
+ std::unique_ptr<MockDtmfToneGenerator> mock(new MockDtmfToneGenerator);
+ mock_dtmf_tone_generator_ = mock.get();
+ deps.dtmf_tone_generator = std::move(mock);
}
+ dtmf_tone_generator_ = deps.dtmf_tone_generator.get();
+
if (use_mock_packet_buffer_) {
- mock_packet_buffer_ = new MockPacketBuffer(config_.max_packets_in_buffer);
- packet_buffer_ = mock_packet_buffer_;
- } else {
- packet_buffer_ = new PacketBuffer(config_.max_packets_in_buffer);
+ std::unique_ptr<MockPacketBuffer> mock(
+ new MockPacketBuffer(config_.max_packets_in_buffer, tick_timer_));
+ mock_packet_buffer_ = mock.get();
+ deps.packet_buffer = std::move(mock);
}
+ packet_buffer_ = deps.packet_buffer.get();
+
if (use_mock_payload_splitter_) {
- mock_payload_splitter_ = new MockPayloadSplitter;
- payload_splitter_ = mock_payload_splitter_;
- } else {
- payload_splitter_ = new PayloadSplitter;
+ std::unique_ptr<MockPayloadSplitter> mock(new MockPayloadSplitter);
+ mock_payload_splitter_ = mock.get();
+ deps.payload_splitter = std::move(mock);
}
- timestamp_scaler_ = new TimestampScaler(*decoder_database_);
- AccelerateFactory* accelerate_factory = new AccelerateFactory;
- ExpandFactory* expand_factory = new ExpandFactory;
- PreemptiveExpandFactory* preemptive_expand_factory =
- new PreemptiveExpandFactory;
-
- neteq_ = new NetEqImpl(config_,
- buffer_level_filter_,
- decoder_database_,
- delay_manager_,
- delay_peak_detector_,
- dtmf_buffer_,
- dtmf_tone_generator_,
- packet_buffer_,
- payload_splitter_,
- timestamp_scaler_,
- accelerate_factory,
- expand_factory,
- preemptive_expand_factory);
+ payload_splitter_ = deps.payload_splitter.get();
+
+ deps.timestamp_scaler = std::unique_ptr<TimestampScaler>(
+ new TimestampScaler(*deps.decoder_database.get()));
+
+ neteq_.reset(new NetEqImpl(config_, std::move(deps)));
ASSERT_TRUE(neteq_ != NULL);
}
@@ -196,36 +169,35 @@ class NetEqImplTest : public ::testing::Test {
if (use_mock_packet_buffer_) {
EXPECT_CALL(*mock_packet_buffer_, Die()).Times(1);
}
- delete neteq_;
}
- NetEqImpl* neteq_;
+ std::unique_ptr<NetEqImpl> neteq_;
NetEq::Config config_;
- MockBufferLevelFilter* mock_buffer_level_filter_;
- BufferLevelFilter* buffer_level_filter_;
- bool use_mock_buffer_level_filter_;
- MockDecoderDatabase* mock_decoder_database_;
- DecoderDatabase* decoder_database_;
- bool use_mock_decoder_database_;
- MockDelayPeakDetector* mock_delay_peak_detector_;
- DelayPeakDetector* delay_peak_detector_;
- bool use_mock_delay_peak_detector_;
- MockDelayManager* mock_delay_manager_;
- DelayManager* delay_manager_;
- bool use_mock_delay_manager_;
- MockDtmfBuffer* mock_dtmf_buffer_;
- DtmfBuffer* dtmf_buffer_;
- bool use_mock_dtmf_buffer_;
- MockDtmfToneGenerator* mock_dtmf_tone_generator_;
- DtmfToneGenerator* dtmf_tone_generator_;
- bool use_mock_dtmf_tone_generator_;
- MockPacketBuffer* mock_packet_buffer_;
- PacketBuffer* packet_buffer_;
- bool use_mock_packet_buffer_;
- MockPayloadSplitter* mock_payload_splitter_;
- PayloadSplitter* payload_splitter_;
- bool use_mock_payload_splitter_;
- TimestampScaler* timestamp_scaler_;
+ TickTimer* tick_timer_ = nullptr;
+ MockBufferLevelFilter* mock_buffer_level_filter_ = nullptr;
+ BufferLevelFilter* buffer_level_filter_ = nullptr;
+ bool use_mock_buffer_level_filter_ = true;
+ MockDecoderDatabase* mock_decoder_database_ = nullptr;
+ DecoderDatabase* decoder_database_ = nullptr;
+ bool use_mock_decoder_database_ = true;
+ MockDelayPeakDetector* mock_delay_peak_detector_ = nullptr;
+ DelayPeakDetector* delay_peak_detector_ = nullptr;
+ bool use_mock_delay_peak_detector_ = true;
+ MockDelayManager* mock_delay_manager_ = nullptr;
+ DelayManager* delay_manager_ = nullptr;
+ bool use_mock_delay_manager_ = true;
+ MockDtmfBuffer* mock_dtmf_buffer_ = nullptr;
+ DtmfBuffer* dtmf_buffer_ = nullptr;
+ bool use_mock_dtmf_buffer_ = true;
+ MockDtmfToneGenerator* mock_dtmf_tone_generator_ = nullptr;
+ DtmfToneGenerator* dtmf_tone_generator_ = nullptr;
+ bool use_mock_dtmf_tone_generator_ = true;
+ MockPacketBuffer* mock_packet_buffer_ = nullptr;
+ PacketBuffer* packet_buffer_ = nullptr;
+ bool use_mock_packet_buffer_ = true;
+ MockPayloadSplitter* mock_payload_splitter_ = nullptr;
+ PayloadSplitter* payload_splitter_ = nullptr;
+ bool use_mock_payload_splitter_ = true;
};
@@ -301,8 +273,8 @@ TEST_F(NetEqImplTest, InsertPacket) {
.WillRepeatedly(Return(&mock_decoder));
EXPECT_CALL(*mock_decoder_database_, IsComfortNoise(kPayloadType))
.WillRepeatedly(Return(false)); // This is not CNG.
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderPCMu;
+ DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(&info));
@@ -356,6 +328,9 @@ TEST_F(NetEqImplTest, InsertPacket) {
}
// Expectations for payload splitter.
+ EXPECT_CALL(*mock_payload_splitter_, SplitFec(_, _))
+ .Times(2)
+ .WillRepeatedly(Return(PayloadSplitter::kOK));
EXPECT_CALL(*mock_payload_splitter_, SplitAudio(_, _))
.Times(2)
.WillRepeatedly(Return(PayloadSplitter::kOK));
@@ -466,7 +441,9 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -521,6 +498,8 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1));
EXPECT_CALL(mock_decoder, IncomingPacket(_, kPayloadLengthBytes, _, _, _))
.WillRepeatedly(Return(0));
+ EXPECT_CALL(mock_decoder, PacketDuration(_, kPayloadLengthBytes))
+ .WillRepeatedly(Return(kPayloadLengthSamples));
int16_t dummy_output[kPayloadLengthSamples] = {0};
// The below expectation will make the mock decoder write
// |kPayloadLengthSamples| zeros to the output array, and mark it as speech.
@@ -541,7 +520,8 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -569,7 +549,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
Return(kPayloadLengthSamples)));
// Pull audio once.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -609,7 +589,8 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_LE(output.samples_per_channel_, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
@@ -630,7 +611,7 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
// Pull audio repeatedly and make sure we get normal output, that is not PLC.
for (size_t i = 0; i < 3; ++i) {
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_LE(output.samples_per_channel_, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
@@ -734,7 +715,8 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
50 * kSampleRateKhz, 10 * kSampleRateKhz
};
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
rtc::Optional<uint32_t> last_timestamp = neteq_->GetPlayoutTimestamp();
ASSERT_TRUE(last_timestamp);
@@ -756,7 +738,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(expected_type[i - 1], output.speech_type_);
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
SCOPED_TRACE("");
verify_timestamp(neteq_->GetPlayoutTimestamp(), i);
}
@@ -772,7 +754,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(expected_type[i - 1], output.speech_type_);
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
SCOPED_TRACE("");
verify_timestamp(neteq_->GetPlayoutTimestamp(), i);
}
@@ -786,7 +768,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
TEST_F(NetEqImplTest, UnsupportedDecoder) {
UseNoMocks();
CreateInstance();
- static const size_t kNetEqMaxFrameSize = 2880; // 60 ms @ 48 kHz.
+ static const size_t kNetEqMaxFrameSize = 5760; // 120 ms @ 48 kHz.
static const size_t kChannels = 2;
const uint8_t kPayloadType = 17; // Just an arbitrary number.
@@ -796,7 +778,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
const size_t kPayloadLengthSamples =
static_cast<size_t>(10 * kSampleRateHz / 1000); // 10 ms.
const size_t kPayloadLengthBytes = 1;
- uint8_t payload[kPayloadLengthBytes]= {0};
+ uint8_t payload[kPayloadLengthBytes] = {0};
int16_t dummy_output[kPayloadLengthSamples * kChannels] = {0};
WebRtcRTPHeader rtp_header;
rtp_header.header.payloadType = kPayloadType;
@@ -806,11 +788,15 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
class MockAudioDecoder : public AudioDecoder {
public:
- void Reset() override {}
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ void Reset() /* override */ {}
MOCK_CONST_METHOD2(PacketDuration, int(const uint8_t*, size_t));
MOCK_METHOD5(DecodeInternal, int(const uint8_t*, size_t, int, int16_t*,
SpeechType*));
- size_t Channels() const override { return kChannels; }
+ size_t Channels() const /* override */ { return kChannels; }
} decoder_;
const uint8_t kFirstPayloadValue = 1;
@@ -860,9 +846,10 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
AudioFrame output;
+ bool muted;
// First call to GetAudio will try to decode the "faulty" packet.
// Expect kFail return value...
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
// ... and kOtherDecoderError error code.
EXPECT_EQ(NetEq::kOtherDecoderError, neteq_->LastError());
// Output size and number of channels should be correct.
@@ -872,7 +859,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
// Second call to GetAudio will decode the packet that is ok. No errors are
// expected.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
EXPECT_EQ(kChannels, output.num_channels_);
}
@@ -965,7 +952,8 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1057,13 +1045,14 @@ TEST_F(NetEqImplTest, DecodingError) {
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
// Pull audio again. Decoder fails.
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
@@ -1072,13 +1061,13 @@ TEST_F(NetEqImplTest, DecodingError) {
// returned.
// Pull audio again, should continue an expansion.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kPLC, output.speech_type_);
// Pull audio again, should behave normal.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1166,13 +1155,14 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kCNG, output.speech_type_);
// Pull audio again. Decoder fails.
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
@@ -1181,7 +1171,7 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
// returned.
// Pull audio again, should resume codec CNG.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kCNG, output.speech_type_);
@@ -1198,4 +1188,235 @@ TEST_F(NetEqImplTest, InitialLastOutputSampleRate) {
EXPECT_EQ(48000, neteq_->last_output_sample_rate_hz());
}
+TEST_F(NetEqImplTest, TickTimerIncrement) {
+ UseNoMocks();
+ CreateInstance();
+ ASSERT_TRUE(tick_timer_);
+ EXPECT_EQ(0u, tick_timer_->ticks());
+ AudioFrame output;
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+ EXPECT_EQ(1u, tick_timer_->ticks());
+}
+
+class Decoder120ms : public AudioDecoder {
+ public:
+ Decoder120ms(SpeechType speech_type)
+ : next_value_(1),
+ speech_type_(speech_type) {}
+
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override {
+ size_t decoded_len =
+ rtc::CheckedDivExact(sample_rate_hz, 1000) * 120 * Channels();
+ for (size_t i = 0; i < decoded_len; ++i) {
+ decoded[i] = next_value_++;
+ }
+ *speech_type = speech_type_;
+ return decoded_len;
+ }
+
+ void Reset() override { next_value_ = 1; }
+ size_t Channels() const override { return 2; }
+
+ private:
+ int16_t next_value_;
+ SpeechType speech_type_;
+};
+
+class NetEqImplTest120ms : public NetEqImplTest {
+ protected:
+ NetEqImplTest120ms() : NetEqImplTest() {}
+ virtual ~NetEqImplTest120ms() {}
+
+ void CreateInstanceNoMocks() {
+ UseNoMocks();
+ CreateInstance();
+ }
+
+ void CreateInstanceWithDelayManagerMock() {
+ UseNoMocks();
+ use_mock_delay_manager_ = true;
+ CreateInstance();
+ }
+
+ uint32_t timestamp_diff_between_packets() const {
+ return rtc::CheckedDivExact(kSamplingFreq_, 1000u) * 120;
+ }
+
+ uint32_t first_timestamp() const { return 10u; }
+
+ void GetFirstPacket() {
+ bool muted;
+ for (int i = 0; i < 12; i++) {
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_FALSE(muted);
+ }
+ }
+
+ void InsertPacket(uint32_t timestamp) {
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = sequence_number_;
+ rtp_header.header.timestamp = timestamp;
+ rtp_header.header.ssrc = 15;
+ const size_t kPayloadLengthBytes = 1; // This can be arbitrary.
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload, 10));
+ sequence_number_++;
+ }
+
+ void Register120msCodec(AudioDecoder::SpeechType speech_type) {
+ decoder_.reset(new Decoder120ms(speech_type));
+ ASSERT_EQ(2u, decoder_->Channels());
+ EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
+ decoder_.get(), NetEqDecoder::kDecoderOpus_2ch,
+ "120ms codec", kPayloadType, kSamplingFreq_));
+ }
+
+ std::unique_ptr<Decoder120ms> decoder_;
+ AudioFrame output_;
+ const uint32_t kPayloadType = 17;
+ const uint32_t kSamplingFreq_ = 48000;
+ uint16_t sequence_number_ = 1;
+};
+
+TEST_F(NetEqImplTest120ms, AudioRepetition) {
+ config_.playout_mode = kPlayoutFax;
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAudioRepetition, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, AlternativePlc) {
+ config_.playout_mode = kPlayoutOff;
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAlternativePlc, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, CodecInternalCng) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kComfortNoise);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kCodecInternalCng, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Normal) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ EXPECT_EQ(kNormal, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Merge) {
+ CreateInstanceWithDelayManagerMock();
+
+ Register120msCodec(AudioDecoder::kSpeech);
+ InsertPacket(first_timestamp());
+
+ GetFirstPacket();
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+
+ InsertPacket(first_timestamp() + 2 * timestamp_diff_between_packets());
+
+ // Delay manager reports a target level which should cause a Merge.
+ EXPECT_CALL(*mock_delay_manager_, TargetLevel()).WillOnce(Return(-10));
+
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kMerge, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Expand) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kExpand, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, FastAccelerate) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a FastAccelerate.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(0), SetArgPointee<1>(0)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kFastAccelerate, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, PreemptiveExpand) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a PreemptiveExpand.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(100), SetArgPointee<1>(100)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kPreemptiveExpand, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Accelerate) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a Accelerate.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(1), SetArgPointee<1>(2)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAccelerate, neteq_->last_operation_for_test());
+}
+
}// namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index 770ebd57835..1a77abcd505 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -24,31 +24,36 @@ using ::testing::Return;
class MockAudioDecoder final : public AudioDecoder {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
static const int kPacketDuration = 960; // 48 kHz * 20 ms
explicit MockAudioDecoder(size_t num_channels)
: num_channels_(num_channels), fec_enabled_(false) {
}
- ~MockAudioDecoder() override { Die(); }
+ ~MockAudioDecoder() /* override */ { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Reset, void());
int PacketDuration(const uint8_t* encoded,
- size_t encoded_len) const override {
+ size_t encoded_len) const /* override */ {
return kPacketDuration;
}
int PacketDurationRedundant(const uint8_t* encoded,
- size_t encoded_len) const override {
+ size_t encoded_len) const /* override */ {
return kPacketDuration;
}
- bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override {
+ bool PacketHasFec(
+ const uint8_t* encoded, size_t encoded_len) const /* override */ {
return fec_enabled_;
}
- size_t Channels() const override { return num_channels_; }
+ size_t Channels() const /* override */ { return num_channels_; }
void set_fec_enabled(bool enable_fec) { fec_enabled_ = enable_fec; }
@@ -60,7 +65,7 @@ class MockAudioDecoder final : public AudioDecoder {
size_t encoded_len,
int /*sample_rate_hz*/,
int16_t* decoded,
- SpeechType* speech_type) override {
+ SpeechType* speech_type) /* override */ {
*speech_type = kSpeech;
memset(decoded, 0, sizeof(int16_t) * kPacketDuration * Channels());
return kPacketDuration * Channels();
@@ -70,7 +75,7 @@ class MockAudioDecoder final : public AudioDecoder {
size_t encoded_len,
int sample_rate_hz,
int16_t* decoded,
- SpeechType* speech_type) override {
+ SpeechType* speech_type) /* override */ {
return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded,
speech_type);
}
@@ -294,7 +299,3 @@ TEST(NetEqNetworkStatsTest, NoiseExpansionTest) {
} // namespace test
} // namespace webrtc
-
-
-
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
index 4ee17d2a446..e1a9922b0b4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
@@ -212,11 +212,14 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
} while (Lost()); // If lost, immediately read the next packet.
}
// Get audio from mono instance.
- EXPECT_EQ(NetEq::kOK, neteq_mono_->GetAudio(&output_));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_mono_->GetAudio(&output_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(1u, output_.num_channels_);
EXPECT_EQ(output_size_samples_, output_.samples_per_channel_);
// Get audio from multi-channel instance.
- ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&output_multi_channel_));
+ ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&output_multi_channel_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(num_channels_, output_multi_channel_.num_channels_);
EXPECT_EQ(output_size_samples_,
output_multi_channel_.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
index f02d3deee9b..bb316e8a81d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
@@ -14,9 +14,12 @@
'target_name': 'rtc_event_log_source',
'type': 'static_library',
'dependencies': [
- '<(webrtc_root)/webrtc.gyp:rtc_event_log',
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log_parser',
'<(webrtc_root)/webrtc.gyp:rtc_event_log_proto',
],
+ 'export_dependent_settings': [
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log_parser',
+ ],
'sources': [
'tools/rtc_event_log_source.h',
'tools/rtc_event_log_source.cc',
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
index b6efe7d7d6b..cf8e5b474c2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -8,10 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for NetEQ.
- */
-
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include <math.h>
@@ -26,6 +22,8 @@
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/sha1digest.h"
+#include "webrtc/base/stringencode.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
@@ -45,6 +43,23 @@ DEFINE_bool(gen_ref, false, "Generate reference files.");
namespace {
+const std::string& PlatformChecksum(const std::string& checksum_general,
+ const std::string& checksum_android,
+ const std::string& checksum_win_32,
+ const std::string& checksum_win_64) {
+#ifdef WEBRTC_ANDROID
+ return checksum_android;
+#elif WEBRTC_WIN
+ #ifdef WEBRTC_ARCH_64_BITS
+ return checksum_win_64;
+ #else
+ return checksum_win_32;
+ #endif // WEBRTC_ARCH_64_BITS
+#else
+ return checksum_general;
+#endif // WEBRTC_WIN
+}
+
bool IsAllZero(const int16_t* buf, size_t buf_length) {
bool all_zero = true;
for (size_t n = 0; n < buf_length && all_zero; ++n)
@@ -89,186 +104,141 @@ void Convert(const webrtc::RtcpStatistics& stats_raw,
stats->set_jitter(stats_raw.jitter);
}
-void WriteMessage(FILE* file, const std::string& message) {
+void AddMessage(FILE* file, rtc::MessageDigest* digest,
+ const std::string& message) {
int32_t size = message.length();
- ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file));
- if (size <= 0)
- return;
- ASSERT_EQ(static_cast<size_t>(size),
- fwrite(message.data(), sizeof(char), size, file));
+ if (file)
+ ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file));
+ digest->Update(&size, sizeof(size));
+
+ if (file)
+ ASSERT_EQ(static_cast<size_t>(size),
+ fwrite(message.data(), sizeof(char), size, file));
+ digest->Update(message.data(), sizeof(char) * size);
}
-void ReadMessage(FILE* file, std::string* message) {
- int32_t size;
- ASSERT_EQ(1u, fread(&size, sizeof(size), 1, file));
- if (size <= 0)
- return;
- std::unique_ptr<char[]> buffer(new char[size]);
- ASSERT_EQ(static_cast<size_t>(size),
- fread(buffer.get(), sizeof(char), size, file));
- message->assign(buffer.get(), size);
-}
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
+void LoadDecoders(webrtc::NetEq* neteq) {
+ // Load PCMu.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCMu,
+ "pcmu", 0));
+ // Load PCMa.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCMa,
+ "pcma", 8));
+#ifdef WEBRTC_CODEC_ILBC
+ // Load iLBC.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderILBC,
+ "ilbc", 102));
+#endif
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+ // Load iSAC.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderISAC,
+ "isac", 103));
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ // Load iSAC SWB.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderISACswb,
+ "isac-swb", 104));
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderOpus,
+ "opus", 111));
+#endif
+ // Load PCM16B nb.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCM16B,
+ "pcm16-nb", 93));
+ // Load PCM16B wb.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(
+ webrtc::NetEqDecoder::kDecoderPCM16Bwb, "pcm16-wb", 94));
+ // Load PCM16B swb32.
+ ASSERT_EQ(
+ 0, neteq->RegisterPayloadType(
+ webrtc::NetEqDecoder::kDecoderPCM16Bswb32kHz, "pcm16-swb32", 95));
+ // Load CNG 8 kHz.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderCNGnb,
+ "cng-nb", 13));
+ // Load CNG 16 kHz.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderCNGwb,
+ "cng-wb", 98));
+}
} // namespace
namespace webrtc {
-class RefFiles {
+class ResultSink {
public:
- RefFiles(const std::string& input_file, const std::string& output_file);
- ~RefFiles();
- template<class T> void ProcessReference(const T& test_results);
- template<typename T, size_t n> void ProcessReference(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void WriteToFile(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void ReadFromFileAndCompare(
+ explicit ResultSink(const std::string& output_file);
+ ~ResultSink();
+
+ template<typename T, size_t n> void AddResult(
const T (&test_results)[n],
size_t length);
- void WriteToFile(const NetEqNetworkStatistics& stats);
- void ReadFromFileAndCompare(const NetEqNetworkStatistics& stats);
- void WriteToFile(const RtcpStatistics& stats);
- void ReadFromFileAndCompare(const RtcpStatistics& stats);
- FILE* input_fp_;
+ void AddResult(const NetEqNetworkStatistics& stats);
+ void AddResult(const RtcpStatistics& stats);
+
+ void VerifyChecksum(const std::string& ref_check_sum);
+
+ private:
FILE* output_fp_;
+ std::unique_ptr<rtc::MessageDigest> digest_;
};
-RefFiles::RefFiles(const std::string &input_file,
- const std::string &output_file)
- : input_fp_(NULL),
- output_fp_(NULL) {
- if (!input_file.empty()) {
- input_fp_ = fopen(input_file.c_str(), "rb");
- EXPECT_TRUE(input_fp_ != NULL);
- }
+ResultSink::ResultSink(const std::string &output_file)
+ : output_fp_(nullptr),
+ digest_(new rtc::Sha1Digest()) {
if (!output_file.empty()) {
output_fp_ = fopen(output_file.c_str(), "wb");
EXPECT_TRUE(output_fp_ != NULL);
}
}
-RefFiles::~RefFiles() {
- if (input_fp_) {
- EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
- fclose(input_fp_);
- }
- if (output_fp_) fclose(output_fp_);
-}
-
-template<class T>
-void RefFiles::ProcessReference(const T& test_results) {
- WriteToFile(test_results);
- ReadFromFileAndCompare(test_results);
+ResultSink::~ResultSink() {
+ if (output_fp_)
+ fclose(output_fp_);
}
template<typename T, size_t n>
-void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
- WriteToFile(test_results, length);
- ReadFromFileAndCompare(test_results, length);
-}
-
-template<typename T, size_t n>
-void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
+void ResultSink::AddResult(const T (&test_results)[n], size_t length) {
if (output_fp_) {
ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
}
+ digest_->Update(&test_results, sizeof(T) * length);
}
-template<typename T, size_t n>
-void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
- size_t length) {
- if (input_fp_) {
- // Read from ref file.
- T* ref = new T[length];
- ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
- // Compare
- ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
- delete [] ref;
- }
-}
-
-void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats_raw) {
+void ResultSink::AddResult(const NetEqNetworkStatistics& stats_raw) {
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!output_fp_)
- return;
neteq_unittest::NetEqNetworkStatistics stats;
Convert(stats_raw, &stats);
std::string stats_string;
ASSERT_TRUE(stats.SerializeToString(&stats_string));
- WriteMessage(output_fp_, stats_string);
+ AddMessage(output_fp_, digest_.get(), stats_string);
#else
FAIL() << "Writing to reference file requires Proto Buffer.";
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::ReadFromFileAndCompare(
- const NetEqNetworkStatistics& stats) {
+void ResultSink::AddResult(const RtcpStatistics& stats_raw) {
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!input_fp_)
- return;
-
- std::string stats_string;
- ReadMessage(input_fp_, &stats_string);
- neteq_unittest::NetEqNetworkStatistics ref_stats;
- ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
-
- // Compare
- ASSERT_EQ(stats.current_buffer_size_ms, ref_stats.current_buffer_size_ms());
- ASSERT_EQ(stats.preferred_buffer_size_ms,
- ref_stats.preferred_buffer_size_ms());
- ASSERT_EQ(stats.jitter_peaks_found, ref_stats.jitter_peaks_found());
- ASSERT_EQ(stats.packet_loss_rate, ref_stats.packet_loss_rate());
- ASSERT_EQ(stats.packet_discard_rate, ref_stats.packet_discard_rate());
- ASSERT_EQ(stats.expand_rate, ref_stats.expand_rate());
- ASSERT_EQ(stats.preemptive_rate, ref_stats.preemptive_rate());
- ASSERT_EQ(stats.accelerate_rate, ref_stats.accelerate_rate());
- ASSERT_EQ(stats.clockdrift_ppm, ref_stats.clockdrift_ppm());
- ASSERT_EQ(stats.added_zero_samples, ref_stats.added_zero_samples());
- ASSERT_EQ(stats.secondary_decoded_rate, ref_stats.secondary_decoded_rate());
- ASSERT_LE(stats.speech_expand_rate, ref_stats.expand_rate());
-#else
- FAIL() << "Reading from reference file requires Proto Buffer.";
-#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
-}
-
-void RefFiles::WriteToFile(const RtcpStatistics& stats_raw) {
-#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!output_fp_)
- return;
neteq_unittest::RtcpStatistics stats;
Convert(stats_raw, &stats);
std::string stats_string;
ASSERT_TRUE(stats.SerializeToString(&stats_string));
- WriteMessage(output_fp_, stats_string);
+ AddMessage(output_fp_, digest_.get(), stats_string);
#else
FAIL() << "Writing to reference file requires Proto Buffer.";
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::ReadFromFileAndCompare(const RtcpStatistics& stats) {
-#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!input_fp_)
- return;
- std::string stats_string;
- ReadMessage(input_fp_, &stats_string);
- neteq_unittest::RtcpStatistics ref_stats;
- ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
-
- // Compare
- ASSERT_EQ(stats.fraction_lost, ref_stats.fraction_lost());
- ASSERT_EQ(stats.cumulative_lost, ref_stats.cumulative_lost());
- ASSERT_EQ(stats.extended_max_sequence_number,
- ref_stats.extended_max_sequence_number());
- ASSERT_EQ(stats.jitter, ref_stats.jitter());
-#else
- FAIL() << "Reading from reference file requires Proto Buffer.";
-#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
+void ResultSink::VerifyChecksum(const std::string& checksum) {
+ std::vector<char> buffer;
+ buffer.resize(digest_->Size());
+ digest_->Finish(&buffer[0], buffer.size());
+ const std::string result = rtc::hex_encode(&buffer[0], digest_->Size());
+ EXPECT_EQ(checksum, result);
}
class NetEqDecodingTest : public ::testing::Test {
@@ -286,14 +256,14 @@ class NetEqDecodingTest : public ::testing::Test {
virtual void SetUp();
virtual void TearDown();
void SelectDecoders(NetEqDecoder* used_codec);
- void LoadDecoders();
void OpenInputFile(const std::string &rtp_file);
void Process();
void DecodeAndCompare(const std::string& rtp_file,
- const std::string& ref_file,
- const std::string& stat_ref_file,
- const std::string& rtcp_ref_file);
+ const std::string& output_checksum,
+ const std::string& network_stats_checksum,
+ const std::string& rtcp_stats_checksum,
+ bool gen_ref);
static void PopulateRtpInfo(int frame_index,
int timestamp,
@@ -350,56 +320,13 @@ void NetEqDecodingTest::SetUp() {
ASSERT_EQ(0, neteq_->NetworkStatistics(&stat));
algorithmic_delay_ms_ = stat.current_buffer_size_ms;
ASSERT_TRUE(neteq_);
- LoadDecoders();
+ LoadDecoders(neteq_);
}
void NetEqDecodingTest::TearDown() {
delete neteq_;
}
-void NetEqDecodingTest::LoadDecoders() {
- // Load PCMu.
- ASSERT_EQ(0,
- neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMu, "pcmu", 0));
- // Load PCMa.
- ASSERT_EQ(0,
- neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMa, "pcma", 8));
-#ifdef WEBRTC_CODEC_ILBC
- // Load iLBC.
- ASSERT_EQ(
- 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderILBC, "ilbc", 102));
-#endif
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
- // Load iSAC.
- ASSERT_EQ(
- 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC, "isac", 103));
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- // Load iSAC SWB.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISACswb,
- "isac-swb", 104));
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderOpus,
- "opus", 111));
-#endif
- // Load PCM16B nb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16B,
- "pcm16-nb", 93));
- // Load PCM16B wb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bwb,
- "pcm16-wb", 94));
- // Load PCM16B swb32.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bswb32kHz,
- "pcm16-swb32", 95));
- // Load CNG 8 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGnb,
- "cng-nb", 13));
- // Load CNG 16 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGwb,
- "cng-wb", 98));
-}
-
void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
rtp_source_.reset(test::RtpFileSource::Create(rtp_file));
}
@@ -426,7 +353,9 @@ void NetEqDecodingTest::Process() {
}
// Get audio from NetEq.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
ASSERT_TRUE((out_frame_.samples_per_channel_ == kBlockSize8kHz) ||
(out_frame_.samples_per_channel_ == kBlockSize16kHz) ||
(out_frame_.samples_per_channel_ == kBlockSize32kHz) ||
@@ -438,29 +367,25 @@ void NetEqDecodingTest::Process() {
sim_clock_ += kTimeStepMs;
}
-void NetEqDecodingTest::DecodeAndCompare(const std::string& rtp_file,
- const std::string& ref_file,
- const std::string& stat_ref_file,
- const std::string& rtcp_ref_file) {
+void NetEqDecodingTest::DecodeAndCompare(
+ const std::string& rtp_file,
+ const std::string& output_checksum,
+ const std::string& network_stats_checksum,
+ const std::string& rtcp_stats_checksum,
+ bool gen_ref) {
OpenInputFile(rtp_file);
- std::string ref_out_file = "";
- if (ref_file.empty()) {
- ref_out_file = webrtc::test::OutputPath() + "neteq_universal_ref.pcm";
- }
- RefFiles ref_files(ref_file, ref_out_file);
+ std::string ref_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_universal_ref.pcm" : "";
+ ResultSink output(ref_out_file);
- std::string stat_out_file = "";
- if (stat_ref_file.empty()) {
- stat_out_file = webrtc::test::OutputPath() + "neteq_network_stats.dat";
- }
- RefFiles network_stat_files(stat_ref_file, stat_out_file);
+ std::string stat_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_network_stats.dat" : "";
+ ResultSink network_stats(stat_out_file);
- std::string rtcp_out_file = "";
- if (rtcp_ref_file.empty()) {
- rtcp_out_file = webrtc::test::OutputPath() + "neteq_rtcp_stats.dat";
- }
- RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
+ std::string rtcp_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_rtcp_stats.dat" : "";
+ ResultSink rtcp_stats(rtcp_out_file);
packet_.reset(rtp_source_->NextPacket());
int i = 0;
@@ -469,25 +394,33 @@ void NetEqDecodingTest::DecodeAndCompare(const std::string& rtp_file,
ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
ASSERT_NO_FATAL_FAILURE(Process());
- ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(
+ ASSERT_NO_FATAL_FAILURE(output.AddResult(
out_frame_.data_, out_frame_.samples_per_channel_));
// Query the network statistics API once per second
if (sim_clock_ % 1000 == 0) {
// Process NetworkStatistics.
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- ASSERT_NO_FATAL_FAILURE(
- network_stat_files.ProcessReference(network_stats));
+ NetEqNetworkStatistics current_network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&current_network_stats));
+ ASSERT_NO_FATAL_FAILURE(network_stats.AddResult(current_network_stats));
+
// Compare with CurrentDelay, which should be identical.
- EXPECT_EQ(network_stats.current_buffer_size_ms, neteq_->CurrentDelayMs());
+ EXPECT_EQ(current_network_stats.current_buffer_size_ms,
+ neteq_->CurrentDelayMs());
// Process RTCPstat.
- RtcpStatistics rtcp_stats;
- neteq_->GetRtcpStatistics(&rtcp_stats);
- ASSERT_NO_FATAL_FAILURE(rtcp_stat_files.ProcessReference(rtcp_stats));
+ RtcpStatistics current_rtcp_stats;
+ neteq_->GetRtcpStatistics(&current_rtcp_stats);
+ ASSERT_NO_FATAL_FAILURE(rtcp_stats.AddResult(current_rtcp_stats));
}
}
+
+ SCOPED_TRACE("Check output audio.");
+ output.VerifyChecksum(output_checksum);
+ SCOPED_TRACE("Check network stats.");
+ network_stats.VerifyChecksum(network_stats_checksum);
+ SCOPED_TRACE("Check rtcp stats.");
+ rtcp_stats.VerifyChecksum(rtcp_stats_checksum);
}
void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
@@ -525,31 +458,30 @@ void NetEqDecodingTest::PopulateCng(int frame_index,
TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
const std::string input_rtp_file =
webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp");
- // Note that neteq4_universal_ref.pcm and neteq4_universal_ref_win_32.pcm
- // are identical. The latter could have been removed, but if clients still
- // have a copy of the file, the test will fail.
- const std::string input_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm");
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string network_stat_ref_file = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq4_network_stats.dat";
-#else
- const std::string network_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat");
-#endif
- const std::string rtcp_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat");
-
- if (FLAGS_gen_ref) {
- DecodeAndCompare(input_rtp_file, "", "", "");
- } else {
- DecodeAndCompare(input_rtp_file,
- input_ref_file,
- network_stat_ref_file,
- rtcp_stat_ref_file);
- }
+
+ const std::string output_checksum = PlatformChecksum(
+ "472ebe1126f41fdb6b5c63c87f625a52e7604e49",
+ "d2a6b6ff54b340cf9f961c7f07768d86b3761073",
+ "472ebe1126f41fdb6b5c63c87f625a52e7604e49",
+ "f9749813dbc3fb59dae761de518fec65b8407c5b");
+
+ const std::string network_stats_checksum = PlatformChecksum(
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9",
+ "01be67dc4c3b8e74743a45cbd8684c0535dec9ad",
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9",
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9");
+
+ const std::string rtcp_stats_checksum = PlatformChecksum(
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d",
+ "f3f7b3d3e71d7e635240b5373b57df6a7e4ce9d4",
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d",
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d");
+
+ DecodeAndCompare(input_rtp_file,
+ output_checksum,
+ network_stats_checksum,
+ rtcp_stats_checksum,
+ FLAGS_gen_ref);
}
#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) && \
@@ -562,26 +494,30 @@ TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) {
const std::string input_rtp_file =
webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp");
- const std::string input_ref_file =
- // The pcm files were generated by using Opus v1.1.2 to decode the RTC
- // file generated by Opus v1.1
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_ref", "pcm");
- const std::string network_stat_ref_file =
- // The network stats file was generated when using Opus v1.1.2 to decode
- // the RTC file generated by Opus v1.1
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_network_stats",
- "dat");
- const std::string rtcp_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_rtcp_stats", "dat");
-
- if (FLAGS_gen_ref) {
- DecodeAndCompare(input_rtp_file, "", "", "");
- } else {
- DecodeAndCompare(input_rtp_file,
- input_ref_file,
- network_stat_ref_file,
- rtcp_stat_ref_file);
- }
+
+ const std::string output_checksum = PlatformChecksum(
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4");
+
+ const std::string network_stats_checksum = PlatformChecksum(
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39");
+
+ const std::string rtcp_stats_checksum = PlatformChecksum(
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0");
+
+ DecodeAndCompare(input_rtp_file,
+ output_checksum,
+ network_stats_checksum,
+ rtcp_stats_checksum,
+ FLAGS_gen_ref);
}
// Use fax mode to avoid time-scaling. This is to simplify the testing of
@@ -610,7 +546,8 @@ TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) {
}
// Pull out all data.
for (size_t i = 0; i < num_frames; ++i) {
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -651,7 +588,8 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -678,7 +616,8 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -699,6 +638,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
const size_t kPayloadBytes = kSamples * 2;
double next_input_time_ms = 0.0;
double t_ms;
+ bool muted;
// Insert speech for 5 seconds.
const int kSpeechDurationMs = 5000;
@@ -715,7 +655,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += static_cast<double>(kFrameSizeMs) * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -744,7 +684,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += static_cast<double>(kCngPeriodMs) * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -757,7 +697,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
const double loop_end_time = t_ms + network_freeze_ms;
for (; t_ms < loop_end_time; t_ms += 10) {
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
}
@@ -769,7 +709,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
if (pull_once && next_input_time_ms >= pull_time_ms) {
pull_once = false;
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
t_ms += 10;
@@ -803,7 +743,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += kFrameSizeMs * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
// Increase clock.
t_ms += 10;
@@ -931,7 +871,9 @@ TEST_F(NetEqDecodingTest, MAYBE_DecoderError) {
for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
out_frame_.data_[i] = 1;
}
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
// Verify that there is a decoder error to check.
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
@@ -968,7 +910,9 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
out_frame_.data_[i] = 1;
}
- EXPECT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
// Verify that the first block of samples is set to 0.
static const int kExpectedOutputLength =
kInitSampleRateHz / 100; // 10 ms at initial sample rate.
@@ -1020,6 +964,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
rtp_info.header.payloadType = payload_type;
uint32_t receive_timestamp = 0;
+ bool muted;
for (int n = 0; n < 10; ++n) { // Insert few packets and get audio.
auto block = input.GetNextBlock();
ASSERT_EQ(expected_samples_per_channel, block.size());
@@ -1031,7 +976,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
payload, enc_len_bytes),
receive_timestamp));
output.Reset();
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
ASSERT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1047,7 +992,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
// Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull
// one frame without checking speech-type. This is the first frame pulled
// without inserting any packet, and might not be labeled as PLC.
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
@@ -1062,7 +1007,8 @@ class NetEqBgnTest : public NetEqDecodingTest {
for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
output.Reset();
memset(output.data_, 1, sizeof(output.data_)); // Set to non-zero.
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
if (output.speech_type_ == AudioFrame::kPLCCNG) {
@@ -1236,9 +1182,10 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// Insert some packets which decode to noise. We are not interested in
// actual decoded values.
uint32_t receive_timestamp = 0;
+ bool muted;
for (int n = 0; n < 100; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
@@ -1254,7 +1201,8 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// Insert sync-packets, the decoded sequence should be all-zero.
for (int n = 0; n < kNumSyncPackets; ++n) {
ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
if (n > algorithmic_frame_delay) {
@@ -1270,7 +1218,8 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// network statistics would show some packet loss.
for (int n = 0; n <= algorithmic_frame_delay + 10; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
if (n >= algorithmic_frame_delay + 1) {
// Expect that this frame contain samples from regular RTP.
EXPECT_TRUE(IsAllNonZero(
@@ -1306,9 +1255,10 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// actual decoded values.
uint32_t receive_timestamp = 0;
int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
+ bool muted;
for (int n = 0; n < algorithmic_frame_delay; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
rtp_info.header.sequenceNumber++;
@@ -1345,7 +1295,8 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// Decode.
for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
EXPECT_TRUE(IsAllNonZero(
@@ -1412,7 +1363,8 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
}
// Pull out data once.
AudioFrame output;
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
@@ -1468,6 +1420,7 @@ void NetEqDecodingTest::DuplicateCng() {
// correct.
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
+ bool muted;
for (int i = 0; i < 3; ++i) {
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
@@ -1475,7 +1428,7 @@ void NetEqDecodingTest::DuplicateCng() {
timestamp += kSamples;
// Pull audio once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
// Verify speech output.
@@ -1492,7 +1445,7 @@ void NetEqDecodingTest::DuplicateCng() {
rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
// Pull audio once and make sure CNG is played.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
EXPECT_FALSE(PlayoutTimestamp()); // Returns empty value during CNG.
@@ -1508,7 +1461,7 @@ void NetEqDecodingTest::DuplicateCng() {
// Pull audio until we have played |kCngPeriodMs| of CNG. Start at 10 ms since
// we have already pulled out CNG once.
for (int cng_time_ms = 10; cng_time_ms < kCngPeriodMs; cng_time_ms += 10) {
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
EXPECT_FALSE(PlayoutTimestamp()); // Returns empty value during CNG.
@@ -1523,7 +1476,7 @@ void NetEqDecodingTest::DuplicateCng() {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
// Pull audio once and verify that the output is speech again.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
rtc::Optional<uint32_t> playout_timestamp = PlayoutTimestamp();
@@ -1561,7 +1514,8 @@ TEST_F(NetEqDecodingTest, CngFirst) {
timestamp += kCngPeriodSamples;
// Pull audio once and make sure CNG is played.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
@@ -1573,10 +1527,261 @@ TEST_F(NetEqDecodingTest, CngFirst) {
timestamp += kSamples;
// Pull audio once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
// Verify speech output.
EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
}
+
+class NetEqDecodingTestWithMutedState : public NetEqDecodingTest {
+ public:
+ NetEqDecodingTestWithMutedState() : NetEqDecodingTest() {
+ config_.enable_muted_state = true;
+ }
+
+ protected:
+ static constexpr size_t kSamples = 10 * 16;
+ static constexpr size_t kPayloadBytes = kSamples * 2;
+
+ void InsertPacket(uint32_t rtp_timestamp) {
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, rtp_timestamp, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ }
+
+ bool GetAudioReturnMuted() {
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ return muted;
+ }
+
+ void GetAudioUntilMuted() {
+ while (!GetAudioReturnMuted()) {
+ ASSERT_LT(counter_++, 1000) << "Test timed out";
+ }
+ }
+
+ void GetAudioUntilNormal() {
+ bool muted = false;
+ while (out_frame_.speech_type_ != AudioFrame::kNormalSpeech) {
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_LT(counter_++, 1000) << "Test timed out";
+ }
+ EXPECT_FALSE(muted);
+ }
+
+ int counter_ = 0;
+};
+
+// Verifies that NetEq goes in and out of muted state as expected.
+TEST_F(NetEqDecodingTestWithMutedState, MutedState) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+
+ // Verify that output audio is not written during muted mode. Other parameters
+ // should be correct, though.
+ AudioFrame new_frame;
+ for (auto& d : new_frame.data_) {
+ d = 17;
+ }
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&new_frame, &muted));
+ EXPECT_TRUE(muted);
+ for (auto d : new_frame.data_) {
+ EXPECT_EQ(17, d);
+ }
+ EXPECT_EQ(out_frame_.timestamp_ + out_frame_.samples_per_channel_,
+ new_frame.timestamp_);
+ EXPECT_EQ(out_frame_.samples_per_channel_, new_frame.samples_per_channel_);
+ EXPECT_EQ(out_frame_.sample_rate_hz_, new_frame.sample_rate_hz_);
+ EXPECT_EQ(out_frame_.num_channels_, new_frame.num_channels_);
+ EXPECT_EQ(out_frame_.speech_type_, new_frame.speech_type_);
+ EXPECT_EQ(out_frame_.vad_activity_, new_frame.vad_activity_);
+
+ // Insert new data. Timestamp is corrected for the time elapsed since the last
+ // packet. Verify that normal operation resumes.
+ InsertPacket(kSamples * counter_);
+ GetAudioUntilNormal();
+
+ NetEqNetworkStatistics stats;
+ EXPECT_EQ(0, neteq_->NetworkStatistics(&stats));
+ // NetEqNetworkStatistics::expand_rate tells the fraction of samples that were
+ // concealment samples, in Q14 (16384 = 100%) .The vast majority should be
+ // concealment samples in this test.
+ EXPECT_GT(stats.expand_rate, 14000);
+ // And, it should be greater than the speech_expand_rate.
+ EXPECT_GT(stats.expand_rate, stats.speech_expand_rate);
+}
+
+// Verifies that NetEq goes out of muted state when given a delayed packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateDelayedPacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+ // Insert new data. Timestamp is only corrected for the half of the time
+ // elapsed since the last packet. That is, the new packet is delayed. Verify
+ // that normal operation resumes.
+ InsertPacket(kSamples * counter_ / 2);
+ GetAudioUntilNormal();
+}
+
+// Verifies that NetEq goes out of muted state when given a future packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateFuturePacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+ // Insert new data. Timestamp is over-corrected for the time elapsed since the
+ // last packet. That is, the new packet is too early. Verify that normal
+ // operation resumes.
+ InsertPacket(kSamples * counter_ * 2);
+ GetAudioUntilNormal();
+}
+
+// Verifies that NetEq goes out of muted state when given an old packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateOldPacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+
+ EXPECT_NE(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
+ // Insert packet which is older than the first packet.
+ InsertPacket(kSamples * (counter_ - 1000));
+ EXPECT_FALSE(GetAudioReturnMuted());
+ EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
+}
+
+class NetEqDecodingTestTwoInstances : public NetEqDecodingTest {
+ public:
+ NetEqDecodingTestTwoInstances() : NetEqDecodingTest() {}
+
+ void SetUp() override {
+ NetEqDecodingTest::SetUp();
+ config2_ = config_;
+ }
+
+ void CreateSecondInstance() {
+ neteq2_.reset(NetEq::Create(config2_));
+ ASSERT_TRUE(neteq2_);
+ LoadDecoders(neteq2_.get());
+ }
+
+ protected:
+ std::unique_ptr<NetEq> neteq2_;
+ NetEq::Config config2_;
+};
+
+namespace {
+::testing::AssertionResult AudioFramesEqualExceptData(const AudioFrame& a,
+ const AudioFrame& b) {
+ if (a.timestamp_ != b.timestamp_)
+ return ::testing::AssertionFailure() << "timestamp_ diff (" << a.timestamp_
+ << " != " << b.timestamp_ << ")";
+ if (a.sample_rate_hz_ != b.sample_rate_hz_)
+ return ::testing::AssertionFailure() << "sample_rate_hz_ diff ("
+ << a.sample_rate_hz_
+ << " != " << b.sample_rate_hz_ << ")";
+ if (a.samples_per_channel_ != b.samples_per_channel_)
+ return ::testing::AssertionFailure()
+ << "samples_per_channel_ diff (" << a.samples_per_channel_
+ << " != " << b.samples_per_channel_ << ")";
+ if (a.num_channels_ != b.num_channels_)
+ return ::testing::AssertionFailure() << "num_channels_ diff ("
+ << a.num_channels_
+ << " != " << b.num_channels_ << ")";
+ if (a.speech_type_ != b.speech_type_)
+ return ::testing::AssertionFailure() << "speech_type_ diff ("
+ << a.speech_type_
+ << " != " << b.speech_type_ << ")";
+ if (a.vad_activity_ != b.vad_activity_)
+ return ::testing::AssertionFailure() << "vad_activity_ diff ("
+ << a.vad_activity_
+ << " != " << b.vad_activity_ << ")";
+ return ::testing::AssertionSuccess();
+}
+
+::testing::AssertionResult AudioFramesEqual(const AudioFrame& a,
+ const AudioFrame& b) {
+ ::testing::AssertionResult res = AudioFramesEqualExceptData(a, b);
+ if (!res)
+ return res;
+ if (memcmp(
+ a.data_, b.data_,
+ a.samples_per_channel_ * a.num_channels_ * sizeof(a.data_[0])) != 0) {
+ return ::testing::AssertionFailure() << "data_ diff";
+ }
+ return ::testing::AssertionSuccess();
+}
+
+} // namespace
+
+TEST_F(NetEqDecodingTestTwoInstances, CompareMutedStateOnOff) {
+ ASSERT_FALSE(config_.enable_muted_state);
+ config2_.enable_muted_state = true;
+ CreateSecondInstance();
+
+ // Insert one speech packet into both NetEqs.
+ const size_t kSamples = 10 * 16;
+ const size_t kPayloadBytes = kSamples * 2;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload, 0));
+
+ AudioFrame out_frame1, out_frame2;
+ bool muted;
+ for (int i = 0; i < 1000; ++i) {
+ std::ostringstream ss;
+ ss << "i = " << i;
+ SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure.
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
+ EXPECT_FALSE(muted);
+ EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
+ if (muted) {
+ EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
+ } else {
+ EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
+ }
+ }
+ EXPECT_TRUE(muted);
+
+ // Insert new data. Timestamp is corrected for the time elapsed since the last
+ // packet.
+ PopulateRtpInfo(0, kSamples * 1000, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload, 0));
+
+ int counter = 0;
+ while (out_frame1.speech_type_ != AudioFrame::kNormalSpeech) {
+ ASSERT_LT(counter++, 1000) << "Test timed out";
+ std::ostringstream ss;
+ ss << "counter = " << counter;
+ SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure.
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
+ EXPECT_FALSE(muted);
+ EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
+ if (muted) {
+ EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
+ } else {
+ EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
+ }
+ }
+ EXPECT_FALSE(muted);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
index 9bddfe77657..f99b3f200f4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
@@ -16,7 +16,6 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
@@ -43,7 +42,6 @@ int Normal::Process(const int16_t* input,
return 0;
}
output->PushBackInterleaved(input, length);
- int16_t* signal = &(*output)[0][0];
const int fs_mult = fs_hz_ / 8000;
assert(fs_mult > 0);
@@ -64,24 +62,26 @@ int Normal::Process(const int16_t* input,
expand_->Process(&expanded);
expand_->Reset();
+ size_t length_per_channel = length / output->Channels();
+ std::unique_ptr<int16_t[]> signal(new int16_t[length_per_channel]);
for (size_t channel_ix = 0; channel_ix < output->Channels(); ++channel_ix) {
// Adjust muting factor (main muting factor times expand muting factor).
external_mute_factor_array[channel_ix] = static_cast<int16_t>(
(external_mute_factor_array[channel_ix] *
expand_->MuteFactor(channel_ix)) >> 14);
- int16_t* signal = &(*output)[channel_ix][0];
- size_t length_per_channel = length / output->Channels();
+ (*output)[channel_ix].CopyTo(length_per_channel, 0, signal.get());
+
// Find largest absolute value in new data.
int16_t decoded_max =
- WebRtcSpl_MaxAbsValueW16(signal, length_per_channel);
+ WebRtcSpl_MaxAbsValueW16(signal.get(), length_per_channel);
// Adjust muting factor if needed (to BGN level).
size_t energy_length =
std::min(static_cast<size_t>(fs_mult * 64), length_per_channel);
int scaling = 6 + fs_shift
- WebRtcSpl_NormW32(decoded_max * decoded_max);
scaling = std::max(scaling, 0); // |scaling| should always be >= 0.
- int32_t energy = WebRtcSpl_DotProductWithScale(signal, signal,
+ int32_t energy = WebRtcSpl_DotProductWithScale(signal.get(), signal.get(),
energy_length, scaling);
int32_t scaled_energy_length =
static_cast<int32_t>(energy_length >> scaling);
@@ -149,19 +149,18 @@ int Normal::Process(const int16_t* input,
int16_t cng_output[kCngLength];
// Reset mute factor and start up fresh.
external_mute_factor_array[0] = 16384;
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder) {
// Generate long enough for 32kHz.
- if (WebRtcCng_Generate(cng_decoder->CngDecoderInstance(), cng_output,
- kCngLength, 0) < 0) {
+ if (!cng_decoder->Generate(cng_output, 0)) {
// Error returned; set return vector to all zeros.
memset(cng_output, 0, sizeof(cng_output));
}
} else {
// If no CNG instance is defined, just copy from the decoded data.
// (This will result in interpolating the decoded with itself.)
- memcpy(cng_output, signal, fs_mult * 8 * sizeof(int16_t));
+ (*output)[0].CopyTo(fs_mult * 8, 0, cng_output);
}
// Interpolate the CNG into the new vector.
// (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
@@ -171,8 +170,8 @@ int Normal::Process(const int16_t* input,
for (size_t i = 0; i < static_cast<size_t>(8 * fs_mult); i++) {
// TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8 now
// for legacy bit-exactness.
- signal[i] =
- (fraction * signal[i] + (32 - fraction) * cng_output[i] + 8) >> 5;
+ (*output)[0][i] = (fraction * (*output)[0][i] +
+ (32 - fraction) * cng_output[i] + 8) >> 5;
fraction += increment;
}
} else if (external_mute_factor_array[0] < 16384) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
index f98e99a82d8..5e1fc131e50 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
@@ -27,9 +27,20 @@
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
using ::testing::_;
+using ::testing::Invoke;
namespace webrtc {
+namespace {
+
+int ExpandProcess120ms(AudioMultiVector* output) {
+ AudioMultiVector dummy_audio(1, 11520u);
+ dummy_audio.CopyTo(output);
+ return 0;
+}
+
+} // namespace
+
TEST(Normal, CreateAndDestroy) {
MockDecoderDatabase db;
int fs = 8000;
@@ -121,6 +132,45 @@ TEST(Normal, InputLengthAndChannelsDoNotMatch) {
EXPECT_CALL(expand, Die()); // Called when |expand| goes out of scope.
}
+TEST(Normal, LastModeExpand120msPacket) {
+ WebRtcSpl_Init();
+ MockDecoderDatabase db;
+ const int kFs = 48000;
+ const size_t kPacketsizeBytes = 11520u;
+ const size_t kChannels = 1;
+ BackgroundNoise bgn(kChannels);
+ SyncBuffer sync_buffer(kChannels, 1000);
+ RandomVector random_vector;
+ StatisticsCalculator statistics;
+ MockExpand expand(&bgn, &sync_buffer, &random_vector, &statistics, kFs,
+ kChannels);
+ Normal normal(kFs, &db, bgn, &expand);
+
+ int16_t input[kPacketsizeBytes] = {0};
+
+ std::unique_ptr<int16_t[]> mute_factor_array(new int16_t[kChannels]);
+ for (size_t i = 0; i < kChannels; ++i) {
+ mute_factor_array[i] = 16384;
+ }
+
+ AudioMultiVector output(kChannels);
+
+ EXPECT_CALL(expand, SetParametersForNormalAfterExpand());
+ EXPECT_CALL(expand, Process(_)).WillOnce(Invoke(ExpandProcess120ms));
+ EXPECT_CALL(expand, Reset());
+ EXPECT_EQ(static_cast<int>(kPacketsizeBytes),
+ normal.Process(input,
+ kPacketsizeBytes,
+ kModeExpand,
+ mute_factor_array.get(),
+ &output));
+
+ EXPECT_EQ(kPacketsizeBytes, output.Size());
+
+ EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
+ EXPECT_CALL(expand, Die()); // Called when |expand| goes out of scope.
+}
+
// TODO(hlundin): Write more tests.
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc
index c62a62f39aa..8a19fe4d592 100644
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-int main(int argc, char* argv[]) {
- // TODO(leozwang): Video render test app is not ready on android,
- // make it dummy test now, will add android specific tests
- return 0;
-}
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+
+namespace webrtc {
+
+Packet::Packet() = default;
+
+Packet::~Packet() = default;
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
index 64b325e027a..d6f64c7e088 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
@@ -12,7 +12,9 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
#include <list>
+#include <memory>
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
@@ -21,20 +23,15 @@ namespace webrtc {
// Struct for holding RTP packets.
struct Packet {
RTPHeader header;
- uint8_t* payload; // Datagram excluding RTP header and header extension.
- size_t payload_length;
- bool primary; // Primary, i.e., not redundant payload.
- int waiting_time;
- bool sync_packet;
+ // Datagram excluding RTP header and header extension.
+ uint8_t* payload = nullptr;
+ size_t payload_length = 0;
+ bool primary = true; // Primary, i.e., not redundant payload.
+ bool sync_packet = false;
+ std::unique_ptr<TickTimer::Stopwatch> waiting_time;
- // Constructor.
- Packet()
- : payload(NULL),
- payload_length(0),
- primary(true),
- waiting_time(0),
- sync_packet(false) {
- }
+ Packet();
+ ~Packet();
// Comparison operators. Establish a packet ordering based on (1) timestamp,
// (2) sequence number, (3) regular packet vs sync-packet and (4) redundancy.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
index c89de12318b..f1b898e34cf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
@@ -19,6 +19,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
@@ -37,8 +38,9 @@ class NewTimestampIsLarger {
const Packet* new_packet_;
};
-PacketBuffer::PacketBuffer(size_t max_number_of_packets)
- : max_number_of_packets_(max_number_of_packets) {}
+PacketBuffer::PacketBuffer(size_t max_number_of_packets,
+ const TickTimer* tick_timer)
+ : max_number_of_packets_(max_number_of_packets), tick_timer_(tick_timer) {}
// Destructor. All packets in the buffer will be destroyed.
PacketBuffer::~PacketBuffer() {
@@ -65,6 +67,8 @@ int PacketBuffer::InsertPacket(Packet* packet) {
int return_val = kOK;
+ packet->waiting_time = tick_timer_->GetNewStopwatch();
+
if (buffer_.size() >= max_number_of_packets_) {
// Buffer is full. Flush it.
Flush();
@@ -268,13 +272,6 @@ size_t PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
return num_samples;
}
-void PacketBuffer::IncrementWaitingTimes(int inc) {
- PacketList::iterator it;
- for (it = buffer_.begin(); it != buffer_.end(); ++it) {
- (*it)->waiting_time += inc;
- }
-}
-
bool PacketBuffer::DeleteFirstPacket(PacketList* packet_list) {
if (packet_list->empty()) {
return false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
index 03c11e61b6e..6867b4cb37e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -17,8 +17,8 @@
namespace webrtc {
-// Forward declaration.
class DecoderDatabase;
+class TickTimer;
// This is the actual buffer holding the packets before decoding.
class PacketBuffer {
@@ -34,7 +34,7 @@ class PacketBuffer {
// Constructor creates a buffer which can hold a maximum of
// |max_number_of_packets| packets.
- PacketBuffer(size_t max_number_of_packets);
+ PacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer);
// Deletes all packets in the buffer before destroying the buffer.
virtual ~PacketBuffer();
@@ -116,10 +116,6 @@ class PacketBuffer {
virtual size_t NumSamplesInBuffer(DecoderDatabase* decoder_database,
size_t last_decoded_length) const;
- // Increase the waiting time counter for every packet in the buffer by |inc|.
- // The default value for |inc| is 1.
- virtual void IncrementWaitingTimes(int inc = 1);
-
virtual void BufferStat(int* num_packets, int* max_num_packets) const;
// Static method that properly deletes the first packet, and its payload
@@ -148,6 +144,7 @@ class PacketBuffer {
private:
size_t max_number_of_packets_;
PacketList buffer_;
+ const TickTimer* tick_timer_;
RTC_DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
index 435b6c848dc..da353010857 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -16,6 +16,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
using ::testing::Return;
using ::testing::_;
@@ -80,13 +81,15 @@ struct PacketsToInsert {
// Start of test definitions.
TEST(PacketBuffer, CreateAndDestroy) {
- PacketBuffer* buffer = new PacketBuffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer* buffer = new PacketBuffer(10, &tick_timer); // 10 packets.
EXPECT_TRUE(buffer->Empty());
delete buffer;
}
TEST(PacketBuffer, InsertPacket) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(17u, 4711u, 0, 10);
const int payload_len = 100;
@@ -107,7 +110,8 @@ TEST(PacketBuffer, InsertPacket) {
// Test to flush buffer.
TEST(PacketBuffer, FlushBuffer) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
const int payload_len = 10;
@@ -127,7 +131,8 @@ TEST(PacketBuffer, FlushBuffer) {
// Test to fill the buffer over the limits, and verify that it flushes.
TEST(PacketBuffer, OverfillBuffer) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
// Insert 10 small packets; should be ok.
@@ -156,7 +161,8 @@ TEST(PacketBuffer, OverfillBuffer) {
// Test inserting a list of packets.
TEST(PacketBuffer, InsertPacketList) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -192,7 +198,8 @@ TEST(PacketBuffer, InsertPacketList) {
// Expecting the buffer to flush.
// TODO(hlundin): Remove this test when legacy operation is no longer needed.
TEST(PacketBuffer, InsertPacketListChangePayloadType) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -230,7 +237,8 @@ TEST(PacketBuffer, InsertPacketListChangePayloadType) {
}
TEST(PacketBuffer, ExtractOrderRedundancy) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const int kPackets = 18;
const int kFrameSize = 10;
const int kPayloadLength = 10;
@@ -289,7 +297,8 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
}
TEST(PacketBuffer, DiscardPackets) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -318,7 +327,8 @@ TEST(PacketBuffer, DiscardPackets) {
}
TEST(PacketBuffer, Reordering) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -373,8 +383,9 @@ TEST(PacketBuffer, Failures) {
const uint32_t ts_increment = 10;
int payload_len = 100;
PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
+ TickTimer tick_timer;
- PacketBuffer* buffer = new PacketBuffer(100); // 100 packets.
+ PacketBuffer* buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
Packet* packet = NULL;
EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
packet = gen.NextPacket(payload_len);
@@ -404,7 +415,7 @@ TEST(PacketBuffer, Failures) {
// Insert packet list of three packets, where the second packet has an invalid
// payload. Expect first packet to be inserted, and the remaining two to be
// discarded.
- buffer = new PacketBuffer(100); // 100 packets.
+ buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
PacketList list;
list.push_back(gen.NextPacket(payload_len)); // Valid packet.
packet = gen.NextPacket(payload_len);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
index 85307181341..530e9d064dc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
@@ -12,6 +12,7 @@
#include <assert.h>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
@@ -143,8 +144,9 @@ int PayloadSplitter::SplitFec(PacketList* packet_list,
// Not an FEC packet.
AudioDecoder* decoder = decoder_database->GetDecoder(payload_type);
- // decoder should not return NULL.
- assert(decoder != NULL);
+ // decoder should not return NULL, except for comfort noise payloads which
+ // are handled separately.
+ assert(decoder != NULL || decoder_database->IsComfortNoise(payload_type));
if (!decoder ||
!decoder->PacketHasFec(packet->payload, packet->payload_length)) {
++it;
@@ -167,8 +169,9 @@ int PayloadSplitter::SplitFec(PacketList* packet_list,
memcpy(new_packet->payload, packet->payload, packet->payload_length);
new_packet->payload_length = packet->payload_length;
new_packet->primary = false;
- new_packet->waiting_time = packet->waiting_time;
new_packet->sync_packet = packet->sync_packet;
+ // Waiting time should not be set here.
+ RTC_DCHECK(!packet->waiting_time);
packet_list->insert(it, new_packet);
break;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
index a68e8d68a98..63772452da6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
@@ -18,6 +18,8 @@
#include <utility> // pair
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
@@ -309,7 +311,8 @@ TEST(RedPayloadSplitter, CheckRedPayloads) {
// Use a real DecoderDatabase object here instead of a mock, since it is
// easier to just register the payload types and let the actual implementation
// do its job.
- DecoderDatabase decoder_database;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase decoder_database(std::move(factory));
decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderCNGnb, "cng-nb");
decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
decoder_database.RegisterPayload(2, NetEqDecoder::kDecoderAVT, "avt");
@@ -372,33 +375,33 @@ TEST(AudioPayloadSplitter, NonSplittable) {
// codec types.
// Use scoped pointers to avoid having to delete them later.
std::unique_ptr<DecoderDatabase::DecoderInfo> info0(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISAC, 16000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISAC, "", 16000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(0))
.WillRepeatedly(Return(info0.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info1(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISACswb, 32000,
- NULL, false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISACswb, "", 32000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(1))
.WillRepeatedly(Return(info1.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info2(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderRED, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderRED, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(2))
.WillRepeatedly(Return(info2.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info3(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderAVT, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderAVT, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(3))
.WillRepeatedly(Return(info3.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info4(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderCNGnb, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderCNGnb, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(4))
.WillRepeatedly(Return(info4.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info5(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderArbitrary, 8000,
- NULL, false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderArbitrary, "",
+ 8000, nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(5))
.WillRepeatedly(Return(info5.get()));
@@ -536,7 +539,7 @@ TEST_P(SplitBySamplesTest, PayloadSizes) {
// Use scoped pointers to avoid having to delete them later.
// (Sample rate is set to 8000 Hz, but does not matter.)
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(decoder_type_, 8000, NULL, false));
+ new DecoderDatabase::DecoderInfo(decoder_type_, "", 8000, nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -623,8 +626,8 @@ TEST_P(SplitIlbcTest, NumFrames) {
// codec types.
// Use scoped pointers to avoid having to delete them later.
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -687,8 +690,8 @@ TEST(IlbcPayloadSplitter, TooLargePayload) {
MockDecoderDatabase decoder_database;
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -719,8 +722,8 @@ TEST(IlbcPayloadSplitter, UnevenPayload) {
MockDecoderDatabase decoder_database;
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -743,7 +746,7 @@ TEST(IlbcPayloadSplitter, UnevenPayload) {
TEST(FecPayloadSplitter, MixedPayload) {
PacketList packet_list;
- DecoderDatabase decoder_database;
+ DecoderDatabase decoder_database(CreateBuiltinAudioDecoderFactory());
decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus, "opus");
decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
@@ -798,7 +801,7 @@ TEST(FecPayloadSplitter, MixedPayload) {
TEST(FecPayloadSplitter, EmbedFecInRed) {
PacketList packet_list;
- DecoderDatabase decoder_database;
+ DecoderDatabase decoder_database(CreateBuiltinAudioDecoderFactory());
const int kTimestampOffset = 20 * 48; // 20 ms * 48 kHz.
uint8_t payload_types[] = {0, 0};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
index 45586ee111c..149f2826582 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
@@ -265,7 +265,7 @@ GSMFR_encinst_t* GSMFRenc_inst[2];
#endif
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
-CNG_enc_inst* CNGenc_inst[2];
+webrtc::ComfortNoiseEncoder *CNG_encoder[2];
#endif
#ifdef CODEC_SPEEX_8
SPEEX_encinst_t* SPEEX8enc_inst[2];
@@ -928,18 +928,8 @@ int NetEQTest_init_coders(webrtc::NetEqDecoder coder,
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
- ok = WebRtcCng_CreateEnc(&CNGenc_inst[k]);
- if (ok != 0) {
- printf("Error: Couldn't allocate memory for CNG encoding instance\n");
- exit(0);
- }
if (sampfreq <= 16000) {
- ok = WebRtcCng_InitEnc(CNGenc_inst[k], sampfreq, 200, 5);
- if (ok == -1) {
- printf("Error: Initialization of CNG struct failed. Error code %d\n",
- WebRtcCng_GetErrorCodeEnc(CNGenc_inst[k]));
- exit(0);
- }
+ CNG_encoder[k] = new webrtc::ComfortNoiseEncoder(sampfreq, 200, 5);
}
#endif
@@ -1461,7 +1451,8 @@ int NetEQTest_free_coders(webrtc::NetEqDecoder coder, size_t numChannels) {
WebRtcVad_Free(VAD_inst[k]);
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
- WebRtcCng_FreeEnc(CNGenc_inst[k]);
+ delete CNG_encoder[k];
+ CNG_encoder[k] = nullptr;
#endif
switch (coder) {
@@ -1600,7 +1591,7 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
size_t numChannels) {
size_t cdlen = 0;
int16_t* tempdata;
- static int first_cng = 1;
+ static bool first_cng = true;
size_t tempLen;
*vad = 1;
@@ -1608,9 +1599,9 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
if (useVAD) {
*vad = 0;
- size_t sampleRate_10 = static_cast<size_t>(10 * sampleRate / 1000);
- size_t sampleRate_20 = static_cast<size_t>(20 * sampleRate / 1000);
- size_t sampleRate_30 = static_cast<size_t>(30 * sampleRate / 1000);
+ const size_t sampleRate_10 = static_cast<size_t>(10 * sampleRate / 1000);
+ const size_t sampleRate_20 = static_cast<size_t>(20 * sampleRate / 1000);
+ const size_t sampleRate_30 = static_cast<size_t>(30 * sampleRate / 1000);
for (size_t k = 0; k < numChannels; k++) {
tempLen = frameLen;
tempdata = &indata[k * frameLen];
@@ -1642,16 +1633,22 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
if (!*vad) {
// all channels are silent
+ rtc::Buffer workaround;
cdlen = 0;
for (size_t k = 0; k < numChannels; k++) {
- WebRtcCng_Encode(CNGenc_inst[k], &indata[k * frameLen],
- (frameLen <= 640 ? frameLen : 640) /* max 640 */,
- encoded, &tempLen, first_cng);
+ workaround.Clear();
+ tempLen = CNG_encoder[k]->Encode(
+ rtc::ArrayView<const int16_t>(
+ &indata[k * frameLen],
+ (frameLen <= 640 ? frameLen : 640) /* max 640 */),
+ first_cng,
+ &workaround);
+ memcpy(encoded, workaround.data(), tempLen);
encoded += tempLen;
cdlen += tempLen;
}
*vad = 0;
- first_cng = 0;
+ first_cng = false;
return (cdlen);
}
}
@@ -1734,7 +1731,7 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
} // end for
- first_cng = 1;
+ first_cng = true;
return (totalLen);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
index 2ebd1927bc4..62bfc1b3cb5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
@@ -43,8 +43,8 @@ class NetEqIsacQualityTest : public NetEqQualityTest {
NetEqIsacQualityTest();
void SetUp() override;
void TearDown() override;
- virtual int EncodeBlock(int16_t* in_data, size_t block_size_samples,
- rtc::Buffer* payload, size_t max_bytes);
+ int EncodeBlock(int16_t* in_data, size_t block_size_samples,
+ rtc::Buffer* payload, size_t max_bytes) override;
private:
ISACFIX_MainStruct* isac_encoder_;
int bit_rate_kbps_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
index baa0d67aded..a6117a4c5b6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
@@ -103,8 +103,8 @@ class NetEqOpusQualityTest : public NetEqQualityTest {
NetEqOpusQualityTest();
void SetUp() override;
void TearDown() override;
- virtual int EncodeBlock(int16_t* in_data, size_t block_size_samples,
- rtc::Buffer* payload, size_t max_bytes);
+ int EncodeBlock(int16_t* in_data, size_t block_size_samples,
+ rtc::Buffer* payload, size_t max_bytes) override;
private:
WebRtcOpusEncInst* opus_encoder_;
OpusRepacketizer* repacketizer_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc
new file mode 100644
index 00000000000..4a1b9b7b1fe
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
+
+namespace webrtc {
+
+TickTimer::Stopwatch::Stopwatch(const TickTimer& ticktimer)
+ : ticktimer_(ticktimer), starttick_(ticktimer.ticks()) {}
+
+TickTimer::Countdown::Countdown(const TickTimer& ticktimer,
+ uint64_t ticks_to_count)
+ : stopwatch_(ticktimer.GetNewStopwatch()),
+ ticks_to_count_(ticks_to_count) {}
+
+TickTimer::Countdown::~Countdown() = default;
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h
new file mode 100644
index 00000000000..8f17f435967
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
+
+#include <memory>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Implements a time counter. The counter is advanced with the Increment()
+// methods, and is queried with the ticks() accessor. It is assumed that one
+// "tick" och the counter corresponds to 10 ms.
+// A TickTimer object can provide two types of associated time-measuring
+// objects: Stopwatch and Countdown.
+class TickTimer {
+ public:
+ // Stopwatch measures time elapsed since it was started, by querying the
+ // associated TickTimer for the current time. The intended use is to request a
+ // new Stopwatch object from a TickTimer object with the GetNewStopwatch()
+ // method. Note: since the Stopwatch object contains a reference to the
+ // TickTimer it is associated with, it cannot outlive the TickTimer.
+ class Stopwatch {
+ public:
+ explicit Stopwatch(const TickTimer& ticktimer);
+
+ uint64_t ElapsedTicks() const { return ticktimer_.ticks() - starttick_; }
+
+ uint64_t ElapsedMs() const {
+ const uint64_t elapsed_ticks = ticktimer_.ticks() - starttick_;
+ const int ms_per_tick = ticktimer_.ms_per_tick();
+ return elapsed_ticks < UINT64_MAX / ms_per_tick
+ ? elapsed_ticks * ms_per_tick
+ : UINT64_MAX;
+ }
+
+ private:
+ const TickTimer& ticktimer_;
+ const uint64_t starttick_;
+ };
+
+ // Countdown counts down from a given start value with each tick of the
+ // associated TickTimer, until zero is reached. The Finished() method will
+ // return true if zero has been reached, false otherwise. The intended use is
+ // to request a new Countdown object from a TickTimer object with the
+ // GetNewCountdown() method. Note: since the Countdown object contains a
+ // reference to the TickTimer it is associated with, it cannot outlive the
+ // TickTimer.
+ class Countdown {
+ public:
+ Countdown(const TickTimer& ticktimer, uint64_t ticks_to_count);
+
+ ~Countdown();
+
+ bool Finished() const {
+ return stopwatch_->ElapsedTicks() >= ticks_to_count_;
+ }
+
+ private:
+ const std::unique_ptr<Stopwatch> stopwatch_;
+ const uint64_t ticks_to_count_;
+ };
+
+ TickTimer() : TickTimer(10) {}
+ explicit TickTimer(int ms_per_tick) : ms_per_tick_(ms_per_tick) {
+ RTC_DCHECK_GT(ms_per_tick_, 0);
+ }
+
+ void Increment() { ++ticks_; }
+
+ // Mainly intended for testing.
+ void Increment(uint64_t x) { ticks_ += x; }
+
+ uint64_t ticks() const { return ticks_; }
+
+ int ms_per_tick() const { return ms_per_tick_; }
+
+ // Returns a new Stopwatch object, based on the current TickTimer. Note that
+ // the new Stopwatch object contains a reference to the current TickTimer,
+ // and must therefore not outlive the TickTimer.
+ std::unique_ptr<Stopwatch> GetNewStopwatch() const {
+ return std::unique_ptr<Stopwatch>(new Stopwatch(*this));
+ }
+
+ // Returns a new Countdown object, based on the current TickTimer. Note that
+ // the new Countdown object contains a reference to the current TickTimer,
+ // and must therefore not outlive the TickTimer.
+ std::unique_ptr<Countdown> GetNewCountdown(uint64_t ticks_to_count) const {
+ return std::unique_ptr<Countdown>(new Countdown(*this, ticks_to_count));
+ }
+
+ private:
+ uint64_t ticks_ = 0;
+ const int ms_per_tick_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(TickTimer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc
new file mode 100644
index 00000000000..55edcf5b292
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+
+// Verify that the default value for ms_per_tick is 10.
+TEST(TickTimer, DefaultMsPerTick) {
+ TickTimer tt;
+ EXPECT_EQ(10, tt.ms_per_tick());
+}
+
+TEST(TickTimer, CustomMsPerTick) {
+ TickTimer tt(17);
+ EXPECT_EQ(17, tt.ms_per_tick());
+}
+
+TEST(TickTimer, Increment) {
+ TickTimer tt;
+ EXPECT_EQ(0u, tt.ticks());
+ tt.Increment();
+ EXPECT_EQ(1u, tt.ticks());
+
+ for (int i = 0; i < 17; ++i) {
+ tt.Increment();
+ }
+ EXPECT_EQ(18u, tt.ticks());
+
+ tt.Increment(17);
+ EXPECT_EQ(35u, tt.ticks());
+}
+
+TEST(TickTimer, WrapAround) {
+ TickTimer tt;
+ tt.Increment(UINT64_MAX);
+ EXPECT_EQ(UINT64_MAX, tt.ticks());
+ tt.Increment();
+ EXPECT_EQ(0u, tt.ticks());
+}
+
+TEST(TickTimer, Stopwatch) {
+ TickTimer tt;
+ // Increment it a "random" number of steps.
+ tt.Increment(17);
+
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ EXPECT_EQ(0u, sw->ElapsedTicks()); // Starts at zero.
+ EXPECT_EQ(0u, sw->ElapsedMs());
+ tt.Increment();
+ EXPECT_EQ(1u, sw->ElapsedTicks()); // Increases with the TickTimer.
+ EXPECT_EQ(10u, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchWrapAround) {
+ TickTimer tt;
+ tt.Increment(UINT64_MAX);
+
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ tt.Increment();
+ EXPECT_EQ(0u, tt.ticks());
+ EXPECT_EQ(1u, sw->ElapsedTicks());
+ EXPECT_EQ(10u, sw->ElapsedMs());
+
+ tt.Increment();
+ EXPECT_EQ(1u, tt.ticks());
+ EXPECT_EQ(2u, sw->ElapsedTicks());
+ EXPECT_EQ(20u, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchMsOverflow) {
+ TickTimer tt;
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ tt.Increment(UINT64_MAX / 10);
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+
+ tt.Increment();
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+
+ tt.Increment(UINT64_MAX - tt.ticks());
+ EXPECT_EQ(UINT64_MAX, tt.ticks());
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchWithCustomTicktime) {
+ const int kMsPerTick = 17;
+ TickTimer tt(kMsPerTick);
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ EXPECT_EQ(0u, sw->ElapsedMs());
+ tt.Increment();
+ EXPECT_EQ(static_cast<uint64_t>(kMsPerTick), sw->ElapsedMs());
+}
+
+TEST(TickTimer, Countdown) {
+ TickTimer tt;
+ // Increment it a "random" number of steps.
+ tt.Increment(4711);
+
+ std::unique_ptr<TickTimer::Countdown> cd = tt.GetNewCountdown(17);
+ ASSERT_TRUE(cd);
+
+ EXPECT_FALSE(cd->Finished());
+ tt.Increment();
+ EXPECT_FALSE(cd->Finished());
+
+ tt.Increment(16); // Total increment is now 17.
+ EXPECT_TRUE(cd->Finished());
+
+ // Further increments do not change the state.
+ tt.Increment();
+ EXPECT_TRUE(cd->Finished());
+ tt.Increment(1234);
+ EXPECT_TRUE(cd->Finished());
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
index 6a91ea487b5..880b1f82ea5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
@@ -16,6 +16,7 @@
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
namespace webrtc {
@@ -158,20 +159,15 @@ TimeStretch::ReturnCodes TimeStretch::Process(const int16_t* input,
}
void TimeStretch::AutoCorrelation() {
- // Set scaling factor for cross correlation to protect against overflow.
- int scaling = kLogCorrelationLen - WebRtcSpl_NormW32(
- max_input_value_ * max_input_value_);
- scaling = std::max(0, scaling);
-
// Calculate correlation from lag kMinLag to lag kMaxLag in 4 kHz domain.
int32_t auto_corr[kCorrelationLen];
- WebRtcSpl_CrossCorrelation(auto_corr, &downsampled_input_[kMaxLag],
- &downsampled_input_[kMaxLag - kMinLag],
- kCorrelationLen, kMaxLag - kMinLag, scaling, -1);
+ CrossCorrelationWithAutoShift(
+ &downsampled_input_[kMaxLag], &downsampled_input_[kMaxLag - kMinLag],
+ kCorrelationLen, kMaxLag - kMinLag, -1, auto_corr);
// Normalize correlation to 14 bits and write to |auto_correlation_|.
int32_t max_corr = WebRtcSpl_MaxAbsValueW32(auto_corr, kCorrelationLen);
- scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
+ int scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
WebRtcSpl_VectorBitShiftW32ToW16(auto_correlation_, kCorrelationLen,
auto_corr, scaling);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
index b1cb45d2014..adaf16223b6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
@@ -23,9 +23,9 @@ namespace webrtc {
TEST(TimestampScaler, TestNoScaling) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderPCMu; // Does not use scaled timestamps.
+ // Use PCMu, because it doesn't use scaled timestamps.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
static const uint8_t kRtpPayloadType = 0;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -44,9 +44,9 @@ TEST(TimestampScaler, TestNoScaling) {
TEST(TimestampScaler, TestNoScalingLargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderPCMu; // Does not use scaled timestamps.
+ // Use PCMu, because it doesn't use scaled timestamps.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
static const uint8_t kRtpPayloadType = 0;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -70,8 +70,9 @@ TEST(TimestampScaler, TestNoScalingLargeStep) {
TEST(TimestampScaler, TestG722) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -94,8 +95,9 @@ TEST(TimestampScaler, TestG722) {
TEST(TimestampScaler, TestG722LargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -122,10 +124,11 @@ TEST(TimestampScaler, TestG722LargeStep) {
TEST(TimestampScaler, TestG722WithCng) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info_g722, info_cng;
- info_g722.codec_type =
- NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
- info_cng.codec_type = NetEqDecoder::kDecoderCNGwb;
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info_g722(NetEqDecoder::kDecoderG722, "",
+ 16000, nullptr);
+ const DecoderDatabase::DecoderInfo info_cng(NetEqDecoder::kDecoderCNGwb, "",
+ 16000, nullptr);
static const uint8_t kRtpPayloadTypeG722 = 17;
static const uint8_t kRtpPayloadTypeCng = 13;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeG722))
@@ -164,9 +167,9 @@ TEST(TimestampScaler, TestG722WithCng) {
// as many tests here.
TEST(TimestampScaler, TestG722Packet) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderG722; // Does uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -193,8 +196,9 @@ TEST(TimestampScaler, TestG722Packet) {
// we are not doing as many tests here.
TEST(TimestampScaler, TestG722PacketList) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -222,8 +226,9 @@ TEST(TimestampScaler, TestG722PacketList) {
TEST(TimestampScaler, TestG722Reset) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -262,8 +267,8 @@ TEST(TimestampScaler, TestG722Reset) {
// timestamp scaler.
TEST(TimestampScaler, TestOpusLargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderOpus;
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderOpus, "", 48000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
index 2608d9a03b7..7a51256af2d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
@@ -45,7 +45,9 @@ void NetEqExternalDecoderTest::InsertPacket(
void NetEqExternalDecoderTest::GetOutputAudio(AudioFrame* output) {
// Get audio from regular instance.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(output, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(channels_, output->num_channels_);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
output->samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
index 59402a2029b..d0052c28a8d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h"
+#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
@@ -105,7 +106,9 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
}
// Get output audio, but don't do anything with it.
- int error = neteq->GetAudio(&out_frame);
+ bool muted;
+ int error = neteq->GetAudio(&out_frame, &muted);
+ RTC_CHECK(!muted);
if (error != NetEq::kOK)
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
index 5f874ad8dbe..2983cebe9d4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
@@ -391,7 +391,9 @@ int NetEqQualityTest::Transmit() {
}
int NetEqQualityTest::DecodeBlock() {
- int ret = neteq_->GetAudio(&out_frame_);
+ bool muted;
+ int ret = neteq_->GetAudio(&out_frame_, &muted);
+ RTC_CHECK(!muted);
if (ret != NetEq::kOK) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
index fdb66714cfb..1d462b3c9f2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
@@ -605,7 +605,9 @@ int main(int argc, char* argv[]) {
// Check if it is time to get output audio.
while (time_now_ms >= next_output_time_ms && output_event_available) {
webrtc::AudioFrame out_frame;
- int error = neteq->GetAudio(&out_frame);
+ bool muted;
+ int error = neteq->GetAudio(&out_frame, &muted);
+ RTC_CHECK(!muted);
if (error != NetEq::kOK) {
std::cerr << "GetAudio returned error code " <<
neteq->LastError() << std::endl;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
index dad72eaecd1..9192839be30 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
@@ -16,51 +16,15 @@
#include <limits>
#include "webrtc/base/checks.h"
+#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
-// Files generated at build-time by the protobuf compiler.
-#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
-#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
-#else
-#include "webrtc/call/rtc_event_log.pb.h"
-#endif
namespace webrtc {
namespace test {
-namespace {
-
-const rtclog::RtpPacket* GetRtpPacket(const rtclog::Event& event) {
- if (!event.has_type() || event.type() != rtclog::Event::RTP_EVENT)
- return nullptr;
- if (!event.has_timestamp_us() || !event.has_rtp_packet())
- return nullptr;
- const rtclog::RtpPacket& rtp_packet = event.rtp_packet();
- if (!rtp_packet.has_type() || rtp_packet.type() != rtclog::AUDIO ||
- !rtp_packet.has_incoming() || !rtp_packet.incoming() ||
- !rtp_packet.has_packet_length() || rtp_packet.packet_length() == 0 ||
- !rtp_packet.has_header() || rtp_packet.header().size() == 0 ||
- rtp_packet.packet_length() < rtp_packet.header().size())
- return nullptr;
- return &rtp_packet;
-}
-
-const rtclog::AudioPlayoutEvent* GetAudioPlayoutEvent(
- const rtclog::Event& event) {
- if (!event.has_type() || event.type() != rtclog::Event::AUDIO_PLAYOUT_EVENT)
- return nullptr;
- if (!event.has_timestamp_us() || !event.has_audio_playout_event())
- return nullptr;
- const rtclog::AudioPlayoutEvent& playout_event = event.audio_playout_event();
- if (!playout_event.has_local_ssrc())
- return nullptr;
- return &playout_event;
-}
-
-} // namespace
-
RtcEventLogSource* RtcEventLogSource::Create(const std::string& file_name) {
RtcEventLogSource* source = new RtcEventLogSource();
RTC_CHECK(source->OpenFile(file_name));
@@ -76,42 +40,57 @@ bool RtcEventLogSource::RegisterRtpHeaderExtension(RTPExtensionType type,
}
Packet* RtcEventLogSource::NextPacket() {
- while (rtp_packet_index_ < event_log_->stream_size()) {
- const rtclog::Event& event = event_log_->stream(rtp_packet_index_);
- const rtclog::RtpPacket* rtp_packet = GetRtpPacket(event);
- rtp_packet_index_++;
- if (rtp_packet) {
- uint8_t* packet_header = new uint8_t[rtp_packet->header().size()];
- memcpy(packet_header, rtp_packet->header().data(),
- rtp_packet->header().size());
- Packet* packet = new Packet(packet_header, rtp_packet->header().size(),
- rtp_packet->packet_length(),
- event.timestamp_us() / 1000, *parser_.get());
- if (packet->valid_header()) {
- // Check if the packet should not be filtered out.
- if (!filter_.test(packet->header().payloadType) &&
- !(use_ssrc_filter_ && packet->header().ssrc != ssrc_))
- return packet;
- } else {
- std::cout << "Warning: Packet with index " << (rtp_packet_index_ - 1)
- << " has an invalid header and will be ignored." << std::endl;
+ while (rtp_packet_index_ < parsed_stream_.GetNumberOfEvents()) {
+ if (parsed_stream_.GetEventType(rtp_packet_index_) ==
+ ParsedRtcEventLog::RTP_EVENT) {
+ PacketDirection direction;
+ MediaType media_type;
+ size_t header_length;
+ size_t packet_length;
+ uint64_t timestamp_us = parsed_stream_.GetTimestamp(rtp_packet_index_);
+ parsed_stream_.GetRtpHeader(rtp_packet_index_, &direction, &media_type,
+ nullptr, &header_length, &packet_length);
+ if (direction == kIncomingPacket && media_type == MediaType::AUDIO) {
+ uint8_t* packet_header = new uint8_t[header_length];
+ parsed_stream_.GetRtpHeader(rtp_packet_index_, nullptr, nullptr,
+ packet_header, nullptr, nullptr);
+ Packet* packet = new Packet(packet_header, header_length, packet_length,
+ static_cast<double>(timestamp_us) / 1000,
+ *parser_.get());
+ if (packet->valid_header()) {
+ // Check if the packet should not be filtered out.
+ if (!filter_.test(packet->header().payloadType) &&
+ !(use_ssrc_filter_ && packet->header().ssrc != ssrc_)) {
+ rtp_packet_index_++;
+ return packet;
+ }
+ } else {
+ std::cout << "Warning: Packet with index " << rtp_packet_index_
+ << " has an invalid header and will be ignored."
+ << std::endl;
+ }
+ // The packet has either an invalid header or needs to be filtered out,
+ // so it can be deleted.
+ delete packet;
}
- // The packet has either an invalid header or needs to be filtered out, so
- // it can be deleted.
- delete packet;
}
+ rtp_packet_index_++;
}
return nullptr;
}
int64_t RtcEventLogSource::NextAudioOutputEventMs() {
- while (audio_output_index_ < event_log_->stream_size()) {
- const rtclog::Event& event = event_log_->stream(audio_output_index_);
- const rtclog::AudioPlayoutEvent* playout_event =
- GetAudioPlayoutEvent(event);
+ while (audio_output_index_ < parsed_stream_.GetNumberOfEvents()) {
+ if (parsed_stream_.GetEventType(audio_output_index_) ==
+ ParsedRtcEventLog::AUDIO_PLAYOUT_EVENT) {
+ uint64_t timestamp_us = parsed_stream_.GetTimestamp(audio_output_index_);
+ // We call GetAudioPlayout only to check that the protobuf event is
+ // well-formed.
+ parsed_stream_.GetAudioPlayout(audio_output_index_, nullptr);
+ audio_output_index_++;
+ return timestamp_us / 1000;
+ }
audio_output_index_++;
- if (playout_event)
- return event.timestamp_us() / 1000;
}
return std::numeric_limits<int64_t>::max();
}
@@ -120,8 +99,7 @@ RtcEventLogSource::RtcEventLogSource()
: PacketSource(), parser_(RtpHeaderParser::Create()) {}
bool RtcEventLogSource::OpenFile(const std::string& file_name) {
- event_log_.reset(new rtclog::EventStream());
- return RtcEventLog::ParseRtcEventLog(file_name, event_log_.get());
+ return parsed_stream_.ParseFile(file_name);
}
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
index 312338ee087..ad7add154c5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
@@ -15,6 +15,7 @@
#include <string>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/call/rtc_event_log_parser.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -22,10 +23,6 @@ namespace webrtc {
class RtpHeaderParser;
-namespace rtclog {
-class EventStream;
-} // namespace rtclog
-
namespace test {
class Packet;
@@ -55,10 +52,10 @@ class RtcEventLogSource : public PacketSource {
bool OpenFile(const std::string& file_name);
- int rtp_packet_index_ = 0;
- int audio_output_index_ = 0;
+ size_t rtp_packet_index_ = 0;
+ size_t audio_output_index_ = 0;
- std::unique_ptr<rtclog::EventStream> event_log_;
+ ParsedRtcEventLog parsed_stream_;
std::unique_ptr<RtpHeaderParser> parser_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogSource);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
index a2506ba0113..833398acddf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
@@ -21,13 +21,13 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
#include "webrtc/modules/audio_coding/test/utility.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -323,7 +323,8 @@ bool APITest::APIThreadB(void* obj) {
bool APITest::PullAudioRunA() {
_pullEventA->Wait(100);
AudioFrame audioFrame;
- if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame) < 0) {
+ bool muted;
+ if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame, &muted) < 0) {
bool thereIsDecoder;
{
ReadLockScoped rl(_apiTestRWLock);
@@ -343,7 +344,8 @@ bool APITest::PullAudioRunA() {
bool APITest::PullAudioRunB() {
_pullEventB->Wait(100);
AudioFrame audioFrame;
- if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame) < 0) {
+ bool muted;
+ if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame, &muted) < 0) {
bool thereIsDecoder;
{
ReadLockScoped rl(_apiTestRWLock);
@@ -560,7 +562,7 @@ void APITest::Perform() {
// Keep main thread waiting for sender/receiver
// threads to complete
EventWrapper* completeEvent = EventWrapper::Create();
- uint64_t startTime = TickTime::MillisecondTimestamp();
+ uint64_t startTime = rtc::TimeMillis();
uint64_t currentTime;
// Run test in 2 minutes (120000 ms).
do {
@@ -570,7 +572,7 @@ void APITest::Perform() {
}
//fflush(stderr);
completeEvent->Wait(50);
- currentTime = TickTime::MillisecondTimestamp();
+ currentTime = rtc::TimeMillis();
} while ((currentTime - startTime) < 120000);
//completeEvent->Wait(0xFFFFFFFF);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc b/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
index 0507691fb4d..46c398b1b75 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
@@ -14,7 +14,7 @@
#include <iostream>
#include "webrtc/base/format_macros.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/base/timeutils.h"
namespace webrtc {
@@ -234,7 +234,7 @@ Channel::Channel(int16_t chID)
_lastFrameSizeSample(0),
_packetLoss(0),
_useFECTestWithPacketLoss(false),
- _beginTime(TickTime::MillisecondTimestamp()),
+ _beginTime(rtc::TimeMillis()),
_totalBytes(0),
external_send_timestamp_(-1),
external_sequence_number_(-1),
@@ -286,7 +286,7 @@ void Channel::ResetStats() {
_payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
}
}
- _beginTime = TickTime::MillisecondTimestamp();
+ _beginTime = rtc::TimeMillis();
_totalBytes = 0;
_channelCritSect.Leave();
}
@@ -411,7 +411,7 @@ uint32_t Channel::LastInTimestamp() {
double Channel::BitRate() {
double rate;
- uint64_t currTime = TickTime::MillisecondTimestamp();
+ uint64_t currTime = rtc::TimeMillis();
_channelCritSect.Enter();
rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
_channelCritSect.Leave();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
index e0632243bf4..724502354e9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
@@ -208,8 +208,12 @@ bool Receiver::IncomingPacket() {
bool Receiver::PlayoutData() {
AudioFrame audioFrame;
-
- int32_t ok =_acm->PlayoutData10Ms(_frequency, &audioFrame);
+ bool muted;
+ int32_t ok = _acm->PlayoutData10Ms(_frequency, &audioFrame, &muted);
+ if (muted) {
+ ADD_FAILURE();
+ return false;
+ }
EXPECT_EQ(0, ok);
if (ok < 0){
return false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc b/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc
deleted file mode 100644
index c9f80808260..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <string.h>
-
-#include <math.h>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/test/SpatialAudio.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#define NUM_PANN_COEFFS 10
-
-SpatialAudio::SpatialAudio(int testMode)
- : _acmLeft(AudioCodingModule::Create(1)),
- _acmRight(AudioCodingModule::Create(2)),
- _acmReceiver(AudioCodingModule::Create(3)),
- _testMode(testMode) {
-}
-
-SpatialAudio::~SpatialAudio() {
- delete _channel;
- _inFile.Close();
- _outFile.Close();
-}
-
-int16_t SpatialAudio::Setup() {
- _channel = new Channel;
-
- // Register callback for the sender side.
- CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
- CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
- // Register the receiver ACM in channel
- _channel->RegisterReceiverACM(_acmReceiver.get());
-
- uint16_t sampFreqHz = 32000;
-
- const std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- _inFile.Open(file_name, sampFreqHz, "rb", false);
-
- std::string output_file = webrtc::test::OutputPath()
- + "out_spatial_autotest.pcm";
- if (_testMode == 1) {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- printf("\n");
- printf("Enter the output file [%s]: ", output_file.c_str());
- PCMFile::ChooseFile(&output_file, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
- } else {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- }
- _outFile.Open(output_file, sampFreqHz, "wb", false);
- _outFile.SaveStereo(true);
-
- // Register all available codes as receiving codecs.
- CodecInst codecInst;
- int status;
- uint8_t num_encoders = _acmReceiver->NumberOfCodecs();
- // Register all available codes as receiving codecs once more.
- for (uint8_t n = 0; n < num_encoders; n++) {
- status = _acmReceiver->Codec(n, &codecInst);
- if (status < 0) {
- printf("Error in Codec(), no matching codec found");
- }
- status = _acmReceiver->RegisterReceiveCodec(codecInst);
- if (status < 0) {
- printf("Error in RegisterReceiveCodec() for payload type %d",
- codecInst.pltype);
- }
- }
-
- return 0;
-}
-
-void SpatialAudio::Perform() {
- if (_testMode == 0) {
- printf("Running SpatialAudio Test");
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
- "---------- SpatialAudio ----------");
- }
-
- Setup();
-
- CodecInst codecInst;
- _acmLeft->Codec((uint8_t) 1, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- int16_t pannCntr = 0;
-
- double leftPanning[NUM_PANN_COEFFS] = { 1.00, 0.95, 0.90, 0.85, 0.80, 0.75,
- 0.70, 0.60, 0.55, 0.50 };
- double rightPanning[NUM_PANN_COEFFS] = { 0.50, 0.55, 0.60, 0.70, 0.75, 0.80,
- 0.85, 0.90, 0.95, 1.00 };
-
- while ((pannCntr + 1) < NUM_PANN_COEFFS) {
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
-
- // Change codec
- _acmLeft->Codec((uint8_t) 3, &codecInst);
- codecInst.pacsize = 320;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
- if (_testMode == 0) {
- printf(".");
- }
- }
-
- _acmLeft->Codec((uint8_t) 4, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
- pannCntr = NUM_PANN_COEFFS - 1;
- while (pannCntr >= 0) {
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr--;
- if (_testMode == 0) {
- printf(".");
- }
- }
- if (_testMode == 0) {
- printf("Done!\n");
- }
-}
-
-void SpatialAudio::EncodeDecode(const double leftPanning,
- const double rightPanning) {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- const double rightToLeftRatio = rightPanning / leftPanning;
-
- _channel->SetIsStereo(true);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * leftPanning + 0.5);
- }
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * rightToLeftRatio + 0.5);
- }
- CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-void SpatialAudio::EncodeDecode() {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- _channel->SetIsStereo(false);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h b/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h
deleted file mode 100644
index 270c370cf46..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
-#define WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
-
-#include <memory>
-
-#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/test/Channel.h"
-#include "webrtc/modules/audio_coding/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/test/utility.h"
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-
-namespace webrtc {
-
-class SpatialAudio : public ACMTest {
- public:
- SpatialAudio(int testMode);
- ~SpatialAudio();
-
- void Perform();
- private:
- int16_t Setup();
- void EncodeDecode(double leftPanning, double rightPanning);
- void EncodeDecode();
-
- std::unique_ptr<AudioCodingModule> _acmLeft;
- std::unique_ptr<AudioCodingModule> _acmRight;
- std::unique_ptr<AudioCodingModule> _acmReceiver;
- Channel* _channel;
- PCMFile _inFile;
- PCMFile _outFile;
- int _testMode;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
index bacfd371880..80a0464d385 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
@@ -452,7 +452,9 @@ void TestAllCodecs::Run(TestPack* channel) {
}
// Run received side of ACM.
- CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame));
+ bool muted;
+ CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file.
outfile_b_.Write10MsData(audio_frame.data_,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
index a1bdc04e531..24cda1122b9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
@@ -461,7 +461,9 @@ void TestRedFec::Run() {
while (!_inFileA.EndOfFile()) {
EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
}
_inFileA.Rewind();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
index 9bf560d3237..85a2ae283b2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
@@ -792,7 +792,9 @@ void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels,
}
// Run received side of ACM
- EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file
out_file_.Write10MsData(
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
index 229dc2d4745..4f53e47cacc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
@@ -108,7 +108,9 @@ void TestVadDtx::Run(std::string in_filename, int frequency, int channels,
audio_frame.timestamp_ = time_stamp;
time_stamp += frame_size_samples;
EXPECT_GE(acm_send_->Add10MsData(audio_frame), 0);
- acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame);
+ bool muted;
+ acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame, &muted);
+ ASSERT_FALSE(muted);
out_file.Write10MsData(audio_frame);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
index 161491b0610..b59ff1fdccc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
@@ -261,13 +261,18 @@ void TwoWayCommunication::Perform() {
EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
EXPECT_GE(_acmRefB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileRefA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileRefB.Write10MsData(audioFrame);
// Update time counters each time a second of data has passed.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc b/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
index 8fa1fb1a3d0..50702f96b4e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
@@ -204,7 +204,10 @@ class DelayTest {
in_file_a_.Read10MsData(audio_frame);
ASSERT_GE(acm_a_->Add10MsData(audio_frame), 0);
- ASSERT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ ASSERT_EQ(0,
+ acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ RTC_DCHECK(!muted);
out_file_b_.Write10MsData(
audio_frame.data_,
audio_frame.samples_per_channel_ * audio_frame.num_channels_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
index 9f223fb81fb..f06833c2d90 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
@@ -26,7 +26,6 @@
#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
#include "webrtc/modules/audio_coding/test/utility.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -199,9 +198,12 @@ void ISACTest::Run10ms() {
EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc b/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
index 966f4c636c6..27a8833ac67 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
@@ -141,8 +141,10 @@ class InsertPacketWithTiming {
// Is it time to pull audio?
if (time_to_playout_audio_ms_ == 0) {
time_to_playout_audio_ms_ = kPlayoutPeriodMs;
+ bool muted;
receive_acm_->PlayoutData10Ms(static_cast<int>(FLAGS_output_fs_hz),
- &frame_);
+ &frame_, &muted);
+ ASSERT_FALSE(muted);
fwrite(frame_.data_, sizeof(frame_.data_[0]),
frame_.samples_per_channel_ * frame_.num_channels_, pcm_out_fid_);
*action |= kAudioPlayedOut;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc b/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
index 104b5e587b2..5d250280f45 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
@@ -336,7 +336,10 @@ void OpusTest::Run(TestPackStereo* channel, size_t channels, int bitrate,
}
// Run received side of ACM.
- ASSERT_EQ(0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ ASSERT_EQ(
+ 0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file.
out_file_.Write10MsData(
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
index 99c1c2da1ee..5de5bf262b2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
@@ -150,8 +150,10 @@ class TargetDelayTest : public ::testing::Test {
// Pull audio equivalent to the amount of audio in one RTP packet.
void Pull() {
AudioFrame frame;
+ bool muted;
for (int k = 0; k < kNum10msPerFrame; ++k) { // Pull one frame.
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame));
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame, &muted));
+ ASSERT_FALSE(muted);
// Had to use ASSERT_TRUE, ASSERT_EQ generated error.
ASSERT_TRUE(kSampleRateHz == frame.sample_rate_hz_);
ASSERT_EQ(1u, frame.num_channels_);
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
index 5d58f42435e..e1c5aedc887 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+#include "webrtc/base/checks.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
@@ -25,8 +26,34 @@ public:
// audio every time it's called.
//
// If it returns -1, the frame will not be added to the mix.
+ //
+ // NOTE: This function should not be called. It will remain for a short
+ // time so that subclasses can override it without getting warnings.
+ // TODO(henrik.lundin) Remove this function.
virtual int32_t GetAudioFrame(int32_t id,
- AudioFrame* audioFrame) = 0;
+ AudioFrame* audioFrame) {
+ RTC_CHECK(false);
+ return -1;
+ }
+
+
+ // The implementation of GetAudioFrameWithMuted should update audio_frame
+ // with new audio every time it's called. The return value will be
+ // interpreted as follows.
+ enum class AudioFrameInfo {
+ kNormal, // The samples in audio_frame are valid and should be used.
+ kMuted, // The samples in audio_frame should not be used, but should be
+ // implicitly interpreted as zero. Other fields in audio_frame
+ // may be read and should contain meaningful values.
+ kError // audio_frame will not be used.
+ };
+
+ virtual AudioFrameInfo GetAudioFrameWithMuted(int32_t id,
+ AudioFrame* audio_frame) {
+ return GetAudioFrame(id, audio_frame) == -1 ?
+ AudioFrameInfo::kError :
+ AudioFrameInfo::kNormal;
+ }
// Returns true if the participant was mixed this mix iteration.
bool IsMixed() const;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index 1d7602533e4..dce3d0b5451 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -19,12 +19,15 @@
namespace webrtc {
namespace {
-struct ParticipantFramePair {
+struct ParticipantFrameStruct {
+ ParticipantFrameStruct(MixerParticipant* p, AudioFrame* a, bool m)
+ : participant(p), audioFrame(a), muted(m) {}
MixerParticipant* participant;
AudioFrame* audioFrame;
+ bool muted;
};
-typedef std::list<ParticipantFramePair*> ParticipantFramePairList;
+typedef std::list<ParticipantFrameStruct*> ParticipantFrameStructList;
// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
@@ -55,7 +58,7 @@ size_t MaxNumChannels(const AudioFrameList* list) {
for (AudioFrameList::const_iterator iter = list->begin();
iter != list->end();
++iter) {
- max_num_channels = std::max(max_num_channels, (*iter)->num_channels_);
+ max_num_channels = std::max(max_num_channels, (*iter).frame->num_channels_);
}
return max_num_channels;
}
@@ -529,8 +532,8 @@ void AudioConferenceMixerImpl::UpdateToMix(
AudioFrameList activeList;
// Struct needed by the passive lists to keep track of which AudioFrame
// belongs to which MixerParticipant.
- ParticipantFramePairList passiveWasNotMixedList;
- ParticipantFramePairList passiveWasMixedList;
+ ParticipantFrameStructList passiveWasNotMixedList;
+ ParticipantFrameStructList passiveWasMixedList;
for (MixerParticipantList::const_iterator participant =
_participantList.begin(); participant != _participantList.end();
++participant) {
@@ -552,12 +555,14 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
+ auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
+ if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
- "failed to GetAudioFrame() from participant");
+ "failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
+ const bool muted = (ret == MixerParticipant::AudioFrameInfo::kMuted);
if (_participantList.size() != 1) {
// TODO(wu): Issue 3390, add support for multiple participants case.
audioFrame->ntp_time_ms_ = -1;
@@ -573,7 +578,7 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
if(audioFrame->vad_activity_ == AudioFrame::kVadActive) {
- if(!wasMixed) {
+ if(!wasMixed && !muted) {
RampIn(*audioFrame);
}
@@ -581,13 +586,15 @@ void AudioConferenceMixerImpl::UpdateToMix(
// There are already more active participants than should be
// mixed. Only keep the ones with the highest energy.
AudioFrameList::iterator replaceItem;
- uint32_t lowestEnergy = CalculateEnergy(*audioFrame);
+ uint32_t lowestEnergy =
+ muted ? 0 : CalculateEnergy(*audioFrame);
bool found_replace_item = false;
for (AudioFrameList::iterator iter = activeList.begin();
iter != activeList.end();
++iter) {
- const uint32_t energy = CalculateEnergy(**iter);
+ const uint32_t energy =
+ muted ? 0 : CalculateEnergy(*iter->frame);
if(energy < lowestEnergy) {
replaceItem = iter;
lowestEnergy = energy;
@@ -595,11 +602,12 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
}
if(found_replace_item) {
- AudioFrame* replaceFrame = *replaceItem;
+ RTC_DCHECK(!muted); // Cannot replace with a muted frame.
+ FrameAndMuteInfo replaceFrame = *replaceItem;
bool replaceWasMixed = false;
std::map<int, MixerParticipant*>::const_iterator it =
- mixParticipantList->find(replaceFrame->id_);
+ mixParticipantList->find(replaceFrame.frame->id_);
// When a frame is pushed to |activeList| it is also pushed
// to mixParticipantList with the frame's id. This means
@@ -607,26 +615,31 @@ void AudioConferenceMixerImpl::UpdateToMix(
assert(it != mixParticipantList->end());
replaceWasMixed = it->second->_mixHistory->WasMixed();
- mixParticipantList->erase(replaceFrame->id_);
+ mixParticipantList->erase(replaceFrame.frame->id_);
activeList.erase(replaceItem);
- activeList.push_front(audioFrame);
+ activeList.push_front(FrameAndMuteInfo(audioFrame, muted));
(*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
if (replaceWasMixed) {
- RampOut(*replaceFrame);
+ if (!replaceFrame.muted) {
+ RampOut(*replaceFrame.frame);
+ }
rampOutList->push_back(replaceFrame);
assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
- _audioFramePool->PushMemory(replaceFrame);
+ _audioFramePool->PushMemory(replaceFrame.frame);
}
} else {
if(wasMixed) {
- RampOut(*audioFrame);
- rampOutList->push_back(audioFrame);
+ if (!muted) {
+ RampOut(*audioFrame);
+ }
+ rampOutList->push_back(FrameAndMuteInfo(audioFrame,
+ muted));
assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
@@ -634,23 +647,23 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
}
} else {
- activeList.push_front(audioFrame);
+ activeList.push_front(FrameAndMuteInfo(audioFrame, muted));
(*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
} else {
if(wasMixed) {
- ParticipantFramePair* pair = new ParticipantFramePair;
- pair->audioFrame = audioFrame;
- pair->participant = *participant;
- passiveWasMixedList.push_back(pair);
+ ParticipantFrameStruct* part_struct =
+ new ParticipantFrameStruct(*participant, audioFrame, muted);
+ passiveWasMixedList.push_back(part_struct);
} else if(mustAddToPassiveList) {
- RampIn(*audioFrame);
- ParticipantFramePair* pair = new ParticipantFramePair;
- pair->audioFrame = audioFrame;
- pair->participant = *participant;
- passiveWasNotMixedList.push_back(pair);
+ if (!muted) {
+ RampIn(*audioFrame);
+ }
+ ParticipantFrameStruct* part_struct =
+ new ParticipantFrameStruct(*participant, audioFrame, muted);
+ passiveWasNotMixedList.push_back(part_struct);
} else {
_audioFramePool->PushMemory(audioFrame);
}
@@ -668,11 +681,12 @@ void AudioConferenceMixerImpl::UpdateToMix(
// Always mix a constant number of AudioFrames. If there aren't enough
// active participants mix passive ones. Starting with those that was mixed
// last iteration.
- for (ParticipantFramePairList::const_iterator
+ for (ParticipantFrameStructList::const_iterator
iter = passiveWasMixedList.begin(); iter != passiveWasMixedList.end();
++iter) {
if(mixList->size() < *maxAudioFrameCounter + mixListStartSize) {
- mixList->push_back((*iter)->audioFrame);
+ mixList->push_back(FrameAndMuteInfo((*iter)->audioFrame,
+ (*iter)->muted));
(*mixParticipantList)[(*iter)->audioFrame->id_] =
(*iter)->participant;
assert(mixParticipantList->size() <=
@@ -683,12 +697,13 @@ void AudioConferenceMixerImpl::UpdateToMix(
delete *iter;
}
// And finally the ones that have not been mixed for a while.
- for (ParticipantFramePairList::const_iterator iter =
+ for (ParticipantFrameStructList::const_iterator iter =
passiveWasNotMixedList.begin();
iter != passiveWasNotMixedList.end();
++iter) {
if(mixList->size() < *maxAudioFrameCounter + mixListStartSize) {
- mixList->push_back((*iter)->audioFrame);
+ mixList->push_back(FrameAndMuteInfo((*iter)->audioFrame,
+ (*iter)->muted));
(*mixParticipantList)[(*iter)->audioFrame->id_] =
(*iter)->participant;
assert(mixParticipantList->size() <=
@@ -706,10 +721,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
AudioFrameList* additionalFramesList) const {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"GetAdditionalAudio(additionalFramesList)");
- // The GetAudioFrame() callback may result in the participant being removed
- // from additionalParticipantList_. If that happens it will invalidate any
- // iterators. Create a copy of the participants list such that the list of
- // participants can be traversed safely.
+ // The GetAudioFrameWithMuted() callback may result in the participant being
+ // removed from additionalParticipantList_. If that happens it will
+ // invalidate any iterators. Create a copy of the participants list such
+ // that the list of participants can be traversed safely.
MixerParticipantList additionalParticipantList;
additionalParticipantList.insert(additionalParticipantList.begin(),
_additionalParticipantList.begin(),
@@ -727,9 +742,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
return;
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
+ auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
+ if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
- "failed to GetAudioFrame() from participant");
+ "failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
@@ -738,7 +754,8 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
_audioFramePool->PushMemory(audioFrame);
continue;
}
- additionalFramesList->push_back(audioFrame);
+ additionalFramesList->push_back(FrameAndMuteInfo(
+ audioFrame, ret == MixerParticipant::AudioFrameInfo::kMuted));
}
}
@@ -775,7 +792,7 @@ void AudioConferenceMixerImpl::ClearAudioFrameList(
for (AudioFrameList::iterator iter = audioFrameList->begin();
iter != audioFrameList->end();
++iter) {
- _audioFramePool->PushMemory(*iter);
+ _audioFramePool->PushMemory(iter->frame);
}
audioFrameList->clear();
}
@@ -834,8 +851,9 @@ int32_t AudioConferenceMixerImpl::MixFromList(
uint32_t position = 0;
if (_numMixedParticipants == 1) {
- mixedAudio->timestamp_ = audioFrameList.front()->timestamp_;
- mixedAudio->elapsed_time_ms_ = audioFrameList.front()->elapsed_time_ms_;
+ mixedAudio->timestamp_ = audioFrameList.front().frame->timestamp_;
+ mixedAudio->elapsed_time_ms_ =
+ audioFrameList.front().frame->elapsed_time_ms_;
} else {
// TODO(wu): Issue 3390.
// Audio frame timestamp is only supported in one channel case.
@@ -857,7 +875,9 @@ int32_t AudioConferenceMixerImpl::MixFromList(
assert(false);
position = 0;
}
- MixFrames(mixedAudio, (*iter), use_limiter_);
+ if (!iter->muted) {
+ MixFrames(mixedAudio, iter->frame, use_limiter_);
+ }
position++;
}
@@ -877,7 +897,9 @@ int32_t AudioConferenceMixerImpl::MixAnonomouslyFromList(
for (AudioFrameList::const_iterator iter = audioFrameList.begin();
iter != audioFrameList.end();
++iter) {
- MixFrames(mixedAudio, *iter, use_limiter_);
+ if (!iter->muted) {
+ MixFrames(mixedAudio, iter->frame, use_limiter_);
+ }
}
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 22c6a235351..e726c08f75a 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -25,7 +25,13 @@ namespace webrtc {
class AudioProcessing;
class CriticalSectionWrapper;
-typedef std::list<AudioFrame*> AudioFrameList;
+struct FrameAndMuteInfo {
+ FrameAndMuteInfo(AudioFrame* f, bool m) : frame(f), muted(m) {}
+ AudioFrame* frame;
+ bool muted;
+};
+
+typedef std::list<FrameAndMuteInfo> AudioFrameList;
typedef std::list<MixerParticipant*> MixerParticipantList;
// Cheshire cat implementation of MixerParticipant's non virtual functions.
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
index 19f5bd8848a..30b2933b61c 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
@@ -17,7 +18,7 @@ TimeScheduler::TimeScheduler(const int64_t periodicityInMs)
_isStarted(false),
_lastPeriodMark(),
_periodicityInMs(periodicityInMs),
- _periodicityInTicks(TickTime::MillisecondsToTicks(periodicityInMs)),
+ _periodicityInTicks(periodicityInMs * rtc::kNumNanosecsPerMillisec),
_missedPeriods(0)
{
}
@@ -33,7 +34,7 @@ int32_t TimeScheduler::UpdateScheduler()
if(!_isStarted)
{
_isStarted = true;
- _lastPeriodMark = TickTime::Now();
+ _lastPeriodMark = rtc::TimeNanos();
return 0;
}
// Don't perform any calculations until the debt of pending periods have
@@ -45,9 +46,9 @@ int32_t TimeScheduler::UpdateScheduler()
}
// Calculate the time that has past since previous call to this function.
- TickTime tickNow = TickTime::Now();
- TickInterval amassedTicks = tickNow - _lastPeriodMark;
- int64_t amassedMs = amassedTicks.Milliseconds();
+ int64_t tickNow = rtc::TimeNanos();
+ int64_t amassedTicks = tickNow - _lastPeriodMark;
+ int64_t amassedMs = amassedTicks / rtc::kNumNanosecsPerMillisec;
// Calculate the number of periods the time that has passed correspond to.
int64_t periodsToClaim = amassedMs / _periodicityInMs;
@@ -89,10 +90,10 @@ int32_t TimeScheduler::TimeToNextUpdate(
// Calculate the time (in ms) that has past since last call to
// UpdateScheduler()
- TickTime tickNow = TickTime::Now();
- TickInterval ticksSinceLastUpdate = tickNow - _lastPeriodMark;
+ int64_t tickNow = rtc::TimeNanos();
+ int64_t ticksSinceLastUpdate = tickNow - _lastPeriodMark;
const int64_t millisecondsSinceLastUpdate =
- ticksSinceLastUpdate.Milliseconds();
+ ticksSinceLastUpdate / rtc::kNumNanosecsPerMillisec;
updateTimeInMS = _periodicityInMs - millisecondsSinceLastUpdate;
updateTimeInMS = (updateTimeInMS < 0) ? 0 : updateTimeInMS;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
index 09d0caa66ae..d1897fa1001 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
@@ -15,8 +15,6 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
-#include "webrtc/system_wrappers/include/tick_util.h"
-
namespace webrtc {
class CriticalSectionWrapper;
class TimeScheduler
@@ -36,7 +34,7 @@ private:
CriticalSectionWrapper* _crit;
bool _isStarted;
- TickTime _lastPeriodMark;
+ int64_t _lastPeriodMark; // In ns
int64_t _periodicityInMs;
int64_t _periodicityInTicks;
diff --git a/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate b/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate
new file mode 100644
index 00000000000..427559dcb05
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'audio_decoder_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_audio_decoder_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/audio_decoder_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_audio_decoder_unittests',
+ 'audio_decoder_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
index 198b67d5200..d743a6a58f7 100644
--- a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
@@ -138,7 +138,7 @@ source_set("audio_device") {
]
}
if (is_ios) {
- deps += [ "../../base:rtc_base_objc" ]
+ deps += [ "../../sdk:rtc_sdk_common_objc" ]
sources += [
"ios/audio_device_ios.h",
"ios/audio_device_ios.mm",
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
index 7655c820ada..48944f57726 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
@@ -562,7 +562,7 @@ class AudioDeviceTest : public ::testing::Test {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
AudioDeviceModule::AudioLayer audio_layer) {
rtc::scoped_refptr<AudioDeviceModule> module(
- AudioDeviceModuleImpl::Create(0, audio_layer));
+ AudioDeviceModule::Create(0, audio_layer));
return module;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc b/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
index 9174a5b7ab1..01e5d5fe4f2 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
@@ -66,7 +66,7 @@ bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() {
// AudioManager implementation
AudioManager::AudioManager()
- : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
+ : j_environment_(JVM::GetInstance()->environment()),
audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
initialized_(false),
hardware_aec_(false),
@@ -80,14 +80,14 @@ AudioManager::AudioManager()
{"nativeCacheAudioParameters",
"(IIZZZZIIJ)V",
reinterpret_cast<void*>(&webrtc::AudioManager::CacheAudioParameters)}};
- j_native_registration_ = rtc::ScopedToUnique(j_environment_->RegisterNatives(
- "org/webrtc/voiceengine/WebRtcAudioManager",
- native_methods, arraysize(native_methods)));
+ j_native_registration_ = j_environment_->RegisterNatives(
+ "org/webrtc/voiceengine/WebRtcAudioManager", native_methods,
+ arraysize(native_methods));
j_audio_manager_.reset(new JavaAudioManager(
j_native_registration_.get(),
- rtc::ScopedToUnique(j_native_registration_->NewObject(
+ j_native_registration_->NewObject(
"<init>", "(Landroid/content/Context;J)V",
- JVM::GetInstance()->context(), PointerTojlong(this)))));
+ JVM::GetInstance()->context(), PointerTojlong(this))));
}
AudioManager::~AudioManager() {
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc b/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
index c6cecc96c5b..455c12f7fd0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
@@ -15,10 +15,9 @@
namespace webrtc {
BuildInfo::BuildInfo()
- : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
- j_build_info_(JVM::GetInstance()->GetClass(
- "org/webrtc/voiceengine/BuildInfo")) {
-}
+ : j_environment_(JVM::GetInstance()->environment()),
+ j_build_info_(
+ JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {}
std::string BuildInfo::GetStringFromJava(const char* name) {
jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;");
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
index 084546dbf7b..4058ff99948 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
@@ -132,7 +132,7 @@ class OpenSLESPlayer {
const AudioParameters audio_parameters_;
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
- // AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
AudioDeviceBuffer* audio_device_buffer_;
bool initialized_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
index abefcdb4507..a7c57896585 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
@@ -167,7 +167,10 @@
}],
['OS=="ios"', {
'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
+ ],
+ 'export_dependent_settings': [
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
],
'sources': [
'ios/audio_device_ios.h',
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
index d881bcd40c9..7abc94d5f77 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
@@ -9,10 +9,11 @@
*/
#include "webrtc/base/refcount.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/audio_device_impl.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include <assert.h>
#include <string.h>
@@ -75,12 +76,12 @@ namespace webrtc {
// AudioDeviceModule::Create()
// ----------------------------------------------------------------------------
-rtc::scoped_refptr<AudioDeviceModule> AudioDeviceModuleImpl::Create(
+rtc::scoped_refptr<AudioDeviceModule> AudioDeviceModule::Create(
const int32_t id,
- const AudioLayer audioLayer) {
+ const AudioLayer audio_layer) {
// Create the generic ref counted (platform independent) implementation.
rtc::scoped_refptr<AudioDeviceModuleImpl> audioDevice(
- new rtc::RefCountedObject<AudioDeviceModuleImpl>(id, audioLayer));
+ new rtc::RefCountedObject<AudioDeviceModuleImpl>(id, audio_layer));
// Ensure that the current platform is supported.
if (audioDevice->CheckPlatform() == -1)
@@ -122,7 +123,7 @@ AudioDeviceModuleImpl::AudioDeviceModuleImpl(const int32_t id, const AudioLayer
_ptrAudioDevice(NULL),
_id(id),
_platformAudioLayer(audioLayer),
- _lastProcessTime(TickTime::MillisecondTimestamp()),
+ _lastProcessTime(rtc::TimeMillis()),
_platformType(kPlatformNotSupported),
_initialized(false),
_lastError(kAdmErrNone)
@@ -406,7 +407,7 @@ AudioDeviceModuleImpl::~AudioDeviceModuleImpl()
int64_t AudioDeviceModuleImpl::TimeUntilNextProcess()
{
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
int64_t deltaProcess = kAdmMaxIdleTimeProcess - (now - _lastProcessTime);
return deltaProcess;
}
@@ -421,7 +422,7 @@ int64_t AudioDeviceModuleImpl::TimeUntilNextProcess()
void AudioDeviceModuleImpl::Process()
{
- _lastProcessTime = TickTime::MillisecondTimestamp();
+ _lastProcessTime = rtc::TimeMillis();
// kPlayoutWarning
if (_ptrAudioDevice->PlayoutWarning())
@@ -1454,6 +1455,7 @@ int32_t AudioDeviceModuleImpl::InitPlayout()
int32_t AudioDeviceModuleImpl::InitRecording()
{
+ TRACE_EVENT0("webrtc", "AudioDeviceModuleImpl::InitRecording");
CHECK_INITIALIZED();
_audioDeviceBuffer.InitRecording();
return (_ptrAudioDevice->InitRecording());
@@ -1515,6 +1517,7 @@ bool AudioDeviceModuleImpl::Playing() const
int32_t AudioDeviceModuleImpl::StartRecording()
{
+ TRACE_EVENT0("webrtc", "AudioDeviceModuleImpl::StartRecording");
CHECK_INITIALIZED();
return (_ptrAudioDevice->StartRecording());
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
index a112e3e3bf6..044ec4ed6b5 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
@@ -16,7 +16,6 @@
#include <memory>
#include "webrtc/base/checks.h"
-#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
@@ -48,11 +47,6 @@ class AudioDeviceModuleImpl : public AudioDeviceModule {
int64_t TimeUntilNextProcess() override;
void Process() override;
- // Factory methods (resource allocation/deallocation)
- static rtc::scoped_refptr<AudioDeviceModule> Create(
- const int32_t id,
- const AudioLayer audioLayer = kPlatformDefaultAudio);
-
// Retrieve the currently utilized audio layer
int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
index 2a6ac1ffe9c..7c6d16f129d 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+#include <cstdlib>
#include <cstring>
#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
@@ -26,7 +27,7 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice(
if (!_isConfigured) {
printf("Was compiled with WEBRTC_DUMMY_AUDIO_PLAY_STATIC_FILE "
"but did not set input/output files to use. Bailing out.\n");
- exit(1);
+ std::exit(1);
}
return new FileAudioDevice(id, _inputAudioFilename, _outputAudioFilename);
}
@@ -45,7 +46,7 @@ void FileAudioDeviceFactory::SetFilenamesToUse(
// Sanity: must be compiled with the right define to run this.
printf("Trying to use dummy file devices, but is not compiled "
"with WEBRTC_DUMMY_FILE_DEVICES. Bailing out.\n");
- exit(1);
+ std::exit(1);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
index d8df05cec89..8457a6b7718 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
@@ -11,6 +11,7 @@
#ifndef MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
#define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
+#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/include/module.h"
@@ -51,6 +52,11 @@ class AudioDeviceModule : public RefCountedModule {
};
public:
+ // Create an ADM.
+ static rtc::scoped_refptr<AudioDeviceModule> Create(
+ const int32_t id,
+ const AudioLayer audio_layer);
+
// Retrieve the currently utilized audio layer
virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
index bfa6372203e..9616a285c0a 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
@@ -13,8 +13,7 @@
#include <memory>
-#include "webrtc/base/asyncinvoker.h"
-#include "webrtc/base/objc/RTCMacros.h"
+#include "WebRTC/RTCMacros.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
@@ -42,7 +41,8 @@ class FineAudioBuffer;
// same thread.
class AudioDeviceIOS : public AudioDeviceGeneric,
public AudioSessionObserver,
- public VoiceProcessingAudioUnitObserver {
+ public VoiceProcessingAudioUnitObserver,
+ public rtc::MessageHandler {
public:
AudioDeviceIOS();
~AudioDeviceIOS();
@@ -162,7 +162,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
void OnInterruptionBegin() override;
void OnInterruptionEnd() override;
void OnValidRouteChange() override;
- void OnConfiguredForWebRTC() override;
+ void OnCanPlayOrRecordChange(bool can_play_or_record) override;
// VoiceProcessingAudioUnitObserver methods.
OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
@@ -176,12 +176,16 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
UInt32 num_frames,
AudioBufferList* io_data) override;
+ // Handles messages from posts.
+ void OnMessage(rtc::Message *msg) override;
+
private:
// Called by the relevant AudioSessionObserver methods on |thread_|.
void HandleInterruptionBegin();
void HandleInterruptionEnd();
void HandleValidRouteChange();
- void HandleConfiguredForWebRTC();
+ void HandleCanPlayOrRecordChange(bool can_play_or_record);
+ void HandleSampleRateChange(float sample_rate);
// Uses current |playout_parameters_| and |record_parameters_| to inform the
// audio device buffer (ADB) about our internal audio parameters.
@@ -197,9 +201,13 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
// Creates the audio unit.
bool CreateAudioUnit();
- // Restarts active audio streams using a new sample rate. Required when e.g.
- // a BT headset is enabled or disabled.
- bool RestartAudioUnit(float sample_rate);
+ // Updates the audio unit state based on current state.
+ void UpdateAudioUnit(bool can_play_or_record);
+
+ // Configures the audio session for WebRTC.
+ void ConfigureAudioSession();
+ // Unconfigures the audio session.
+ void UnconfigureAudioSession();
// Activates our audio session, creates and initializes the voice-processing
// audio unit and verifies that we got the preferred native audio parameters.
@@ -213,11 +221,9 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
rtc::ThreadChecker thread_checker_;
// Thread that this object is created on.
rtc::Thread* thread_;
- // Invoker used to execute methods on thread_.
- std::unique_ptr<rtc::AsyncInvoker> async_invoker_;
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
- // AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
// The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
// and therefore outlives this object.
AudioDeviceBuffer* audio_device_buffer_;
@@ -284,6 +290,9 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
// Audio interruption observer instance.
RTCAudioSessionDelegateAdapter* audio_session_observer_;
+
+ // Set to true if we've activated the audio session.
+ bool has_configured_session_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
index f6c339fed00..8f6fb4d9b63 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
@@ -27,7 +27,7 @@
#include "webrtc/modules/audio_device/fine_audio_buffer.h"
#include "webrtc/modules/utility/include/helpers_ios.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
@@ -61,6 +61,13 @@ namespace webrtc {
const UInt16 kFixedPlayoutDelayEstimate = 30;
const UInt16 kFixedRecordDelayEstimate = 30;
+enum AudioDeviceMessageType : uint32_t {
+ kMessageTypeInterruptionBegin,
+ kMessageTypeInterruptionEnd,
+ kMessageTypeValidRouteChange,
+ kMessageTypeCanPlayOrRecordChange,
+};
+
using ios::CheckAndLogError;
#if !defined(NDEBUG)
@@ -85,15 +92,15 @@ static void LogDeviceInfo() {
#endif // !defined(NDEBUG)
AudioDeviceIOS::AudioDeviceIOS()
- : async_invoker_(new rtc::AsyncInvoker()),
- audio_device_buffer_(nullptr),
+ : audio_device_buffer_(nullptr),
audio_unit_(nullptr),
recording_(0),
playing_(0),
initialized_(false),
rec_is_initialized_(false),
play_is_initialized_(false),
- is_interrupted_(false) {
+ is_interrupted_(false),
+ has_configured_session_(false) {
LOGI() << "ctor" << ios::GetCurrentThreadDescription();
thread_ = rtc::Thread::Current();
audio_session_observer_ =
@@ -191,6 +198,7 @@ int32_t AudioDeviceIOS::StartPlayout() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(play_is_initialized_);
RTC_DCHECK(!playing_);
+ RTC_DCHECK(audio_unit_);
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetPlayout();
}
@@ -209,7 +217,11 @@ int32_t AudioDeviceIOS::StartPlayout() {
int32_t AudioDeviceIOS::StopPlayout() {
LOGI() << "StopPlayout";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!play_is_initialized_ || !playing_) {
+ if (!play_is_initialized_) {
+ return 0;
+ }
+ if (!playing_) {
+ play_is_initialized_ = false;
return 0;
}
if (!recording_) {
@@ -225,6 +237,7 @@ int32_t AudioDeviceIOS::StartRecording() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(rec_is_initialized_);
RTC_DCHECK(!recording_);
+ RTC_DCHECK(audio_unit_);
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetRecord();
}
@@ -243,7 +256,11 @@ int32_t AudioDeviceIOS::StartRecording() {
int32_t AudioDeviceIOS::StopRecording() {
LOGI() << "StopRecording";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!rec_is_initialized_ || !recording_) {
+ if (!rec_is_initialized_) {
+ return 0;
+ }
+ if (!recording_) {
+ rec_is_initialized_ = false;
return 0;
}
if (!playing_) {
@@ -318,51 +335,24 @@ int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
}
void AudioDeviceIOS::OnInterruptionBegin() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleInterruptionBegin();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionBegin, this));
+ thread_->Post(this, kMessageTypeInterruptionBegin);
}
void AudioDeviceIOS::OnInterruptionEnd() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleInterruptionEnd();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionEnd, this));
+ thread_->Post(this, kMessageTypeInterruptionEnd);
}
void AudioDeviceIOS::OnValidRouteChange() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleValidRouteChange();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleValidRouteChange, this));
+ thread_->Post(this, kMessageTypeValidRouteChange);
}
-void AudioDeviceIOS::OnConfiguredForWebRTC() {
- RTC_DCHECK(async_invoker_);
+void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) {
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleValidRouteChange();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleConfiguredForWebRTC, this));
+ thread_->Post(this, kMessageTypeCanPlayOrRecordChange,
+ new rtc::TypedMessageData<bool>(can_play_or_record));
}
OSStatus AudioDeviceIOS::OnDeliverRecordedData(
@@ -385,6 +375,9 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(
RTCLogWarning(@"Expected %u frames but got %u",
static_cast<unsigned int>(frames_per_buffer),
static_cast<unsigned int>(num_frames));
+
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ RTCLogWarning(@"Session:\n %@", session);
return result;
}
@@ -447,12 +440,36 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
return noErr;
}
+void AudioDeviceIOS::OnMessage(rtc::Message *msg) {
+ switch (msg->message_id) {
+ case kMessageTypeInterruptionBegin:
+ HandleInterruptionBegin();
+ break;
+ case kMessageTypeInterruptionEnd:
+ HandleInterruptionEnd();
+ break;
+ case kMessageTypeValidRouteChange:
+ HandleValidRouteChange();
+ break;
+ case kMessageTypeCanPlayOrRecordChange: {
+ rtc::TypedMessageData<bool>* data =
+ static_cast<rtc::TypedMessageData<bool>*>(msg->pdata);
+ HandleCanPlayOrRecordChange(data->data());
+ delete data;
+ break;
+ }
+ }
+}
+
void AudioDeviceIOS::HandleInterruptionBegin() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- RTCLog(@"Stopping the audio unit due to interruption begin.");
- if (!audio_unit_->Stop()) {
- RTCLogError(@"Failed to stop the audio unit.");
+ if (audio_unit_ &&
+ audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ RTCLog(@"Stopping the audio unit due to interruption begin.");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop the audio unit for interruption begin.");
+ }
}
is_interrupted_ = true;
}
@@ -460,66 +477,95 @@ void AudioDeviceIOS::HandleInterruptionBegin() {
void AudioDeviceIOS::HandleInterruptionEnd() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- RTCLog(@"Starting the audio unit due to interruption end.");
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start the audio unit.");
- }
is_interrupted_ = false;
+ RTCLog(@"Interruption ended. Updating audio unit state.");
+ UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord);
}
void AudioDeviceIOS::HandleValidRouteChange() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ HandleSampleRateChange(session.sampleRate);
+}
+
+void AudioDeviceIOS::HandleCanPlayOrRecordChange(bool can_play_or_record) {
+ RTCLog(@"Handling CanPlayOrRecord change to: %d", can_play_or_record);
+ UpdateAudioUnit(can_play_or_record);
+}
+
+void AudioDeviceIOS::HandleSampleRateChange(float sample_rate) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Handling sample rate change to %f.", sample_rate);
+
// Don't do anything if we're interrupted.
if (is_interrupted_) {
+ RTCLog(@"Ignoring sample rate change to %f due to interruption.",
+ sample_rate);
return;
}
- // Only restart audio for a valid route change if the session sample rate
- // has changed.
- RTCAudioSession* session = [RTCAudioSession sharedInstance];
- const double current_sample_rate = playout_parameters_.sample_rate();
- const double session_sample_rate = session.sampleRate;
- if (current_sample_rate != session_sample_rate) {
- RTCLog(@"Route changed caused sample rate to change from %f to %f. "
- "Restarting audio unit.", current_sample_rate, session_sample_rate);
- if (!RestartAudioUnit(session_sample_rate)) {
- RTCLogError(@"Audio restart failed.");
- }
+ // If we don't have an audio unit yet, or the audio unit is uninitialized,
+ // there is no work to do.
+ if (!audio_unit_ ||
+ audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
+ return;
}
-}
-void AudioDeviceIOS::HandleConfiguredForWebRTC() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-
- // If we're not initialized we don't need to do anything. Audio unit will
- // be initialized on initialization.
- if (!rec_is_initialized_ && !play_is_initialized_)
+ // The audio unit is already initialized or started.
+ // Check to see if the sample rate or buffer size has changed.
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ const double session_sample_rate = session.sampleRate;
+ const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
+ const size_t session_frames_per_buffer =
+ static_cast<size_t>(session_sample_rate * session_buffer_duration + .5);
+ const double current_sample_rate = playout_parameters_.sample_rate();
+ const size_t current_frames_per_buffer =
+ playout_parameters_.frames_per_buffer();
+ RTCLog(@"Handling playout sample rate change to: %f\n"
+ " Session sample rate: %f frames_per_buffer: %lu\n"
+ " ADM sample rate: %f frames_per_buffer: %lu",
+ sample_rate,
+ session_sample_rate, (unsigned long)session_frames_per_buffer,
+ current_sample_rate, (unsigned long)current_frames_per_buffer);;
+
+ // Sample rate and buffer size are the same, no work to do.
+ if (abs(current_sample_rate - session_sample_rate) <= DBL_EPSILON &&
+ current_frames_per_buffer == session_frames_per_buffer) {
return;
+ }
- // If we're initialized, we must have an audio unit.
- RTC_DCHECK(audio_unit_);
+ // We need to adjust our format and buffer sizes.
+ // The stream format is about to be changed and it requires that we first
+ // stop and uninitialize the audio unit to deallocate its resources.
+ RTCLog(@"Stopping and uninitializing audio unit to adjust buffers.");
+ bool restart_audio_unit = false;
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ restart_audio_unit = true;
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
- // Use configured audio session's settings to set up audio device buffer.
- // TODO(tkchin): Use RTCAudioSessionConfiguration to pick up settings and
- // pass it along.
+ // Allocate new buffers given the new stream format.
SetupAudioBuffersForActiveAudioSession();
- // Initialize the audio unit. This will affect any existing audio playback.
- if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
- RTCLogError(@"Failed to initialize audio unit after configuration.");
+ // Initialize the audio unit again with the new sample rate.
+ RTC_DCHECK_EQ(playout_parameters_.sample_rate(), session_sample_rate);
+ if (!audio_unit_->Initialize(session_sample_rate)) {
+ RTCLogError(@"Failed to initialize the audio unit with sample rate: %f",
+ session_sample_rate);
return;
}
- // If we haven't started playing or recording there's nothing more to do.
- if (!playing_ && !recording_)
- return;
-
- // We are in a play or record state, start the audio unit.
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start audio unit after configuration.");
+ // Restart the audio unit if it was already running.
+ if (restart_audio_unit && !audio_unit_->Start()) {
+ RTCLogError(@"Failed to start audio unit with sample rate: %f",
+ session_sample_rate);
return;
}
+ RTCLog(@"Successfully handled sample rate change.");
}
void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
@@ -597,6 +643,7 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
// at each input callback when calling AudioUnitRender().
const int data_byte_size = record_parameters_.GetBytesPerBuffer();
record_audio_buffer_.reset(new SInt8[data_byte_size]);
+ memset(record_audio_buffer_.get(), 0, data_byte_size);
audio_record_buffer_list_.mNumberBuffers = 1;
AudioBuffer* audio_buffer = &audio_record_buffer_list_.mBuffers[0];
audio_buffer->mNumberChannels = record_parameters_.channels();
@@ -616,46 +663,117 @@ bool AudioDeviceIOS::CreateAudioUnit() {
return true;
}
-bool AudioDeviceIOS::RestartAudioUnit(float sample_rate) {
- RTCLog(@"Restarting audio unit with new sample rate: %f", sample_rate);
+void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Updating audio unit state. CanPlayOrRecord=%d IsInterrupted=%d",
+ can_play_or_record, is_interrupted_);
- // Stop the active audio unit.
- if (!audio_unit_->Stop()) {
- RTCLogError(@"Failed to stop the audio unit.");
- return false;
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio unit update due to interruption.");
+ return;
}
- // The stream format is about to be changed and it requires that we first
- // uninitialize it to deallocate its resources.
- if (!audio_unit_->Uninitialize()) {
- RTCLogError(@"Failed to uninitialize the audio unit.");
- return false;
+ // If we're not initialized we don't need to do anything. Audio unit will
+ // be initialized on initialization.
+ if (!rec_is_initialized_ && !play_is_initialized_)
+ return;
+
+ // If we're initialized, we must have an audio unit.
+ RTC_DCHECK(audio_unit_);
+
+ bool should_initialize_audio_unit = false;
+ bool should_uninitialize_audio_unit = false;
+ bool should_start_audio_unit = false;
+ bool should_stop_audio_unit = false;
+
+ switch (audio_unit_->GetState()) {
+ case VoiceProcessingAudioUnit::kInitRequired:
+ RTC_NOTREACHED();
+ break;
+ case VoiceProcessingAudioUnit::kUninitialized:
+ should_initialize_audio_unit = can_play_or_record;
+ should_start_audio_unit = should_initialize_audio_unit &&
+ (playing_ || recording_);
+ break;
+ case VoiceProcessingAudioUnit::kInitialized:
+ should_start_audio_unit =
+ can_play_or_record && (playing_ || recording_);
+ should_uninitialize_audio_unit = !can_play_or_record;
+ break;
+ case VoiceProcessingAudioUnit::kStarted:
+ RTC_DCHECK(playing_ || recording_);
+ should_stop_audio_unit = !can_play_or_record;
+ should_uninitialize_audio_unit = should_stop_audio_unit;
+ break;
+ }
+
+ if (should_initialize_audio_unit) {
+ RTCLog(@"Initializing audio unit for UpdateAudioUnit");
+ ConfigureAudioSession();
+ SetupAudioBuffersForActiveAudioSession();
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize audio unit.");
+ return;
+ }
}
- // Allocate new buffers given the new stream format.
- SetupAudioBuffersForActiveAudioSession();
+ if (should_start_audio_unit) {
+ RTCLog(@"Starting audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Start()) {
+ RTCLogError(@"Failed to start audio unit.");
+ return;
+ }
+ }
- // Initialize the audio unit again with the new sample rate.
- RTC_DCHECK_EQ(playout_parameters_.sample_rate(), sample_rate);
- if (!audio_unit_->Initialize(sample_rate)) {
- RTCLogError(@"Failed to initialize the audio unit with sample rate: %f",
- sample_rate);
- return false;
+ if (should_stop_audio_unit) {
+ RTCLog(@"Stopping audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop audio unit.");
+ return;
+ }
}
- // Restart the audio unit.
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start audio unit.");
- return false;
+ if (should_uninitialize_audio_unit) {
+ RTCLog(@"Uninitializing audio unit for UpdateAudioUnit");
+ audio_unit_->Uninitialize();
+ UnconfigureAudioSession();
}
- RTCLog(@"Successfully restarted audio unit.");
+}
- return true;
+void AudioDeviceIOS::ConfigureAudioSession() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return;
+ }
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ [session lockForConfiguration];
+ [session configureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+}
+
+void AudioDeviceIOS::UnconfigureAudioSession() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Unconfiguring audio session.");
+ if (!has_configured_session_) {
+ RTCLogWarning(@"Audio session already unconfigured.");
+ return;
+ }
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ [session lockForConfiguration];
+ [session unconfigureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = false;
+ RTCLog(@"Unconfigured audio session.");
}
bool AudioDeviceIOS::InitPlayOrRecord() {
LOGI() << "InitPlayOrRecord";
+ // There should be no audio unit at this point.
if (!CreateAudioUnit()) {
return false;
}
@@ -674,14 +792,11 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
return false;
}
- // If we are already configured properly, we can initialize the audio unit.
- if (session.isConfiguredForWebRTC) {
- [session unlockForConfiguration];
+ // If we are ready to play or record, initialize the audio unit.
+ if (session.canPlayOrRecord) {
+ ConfigureAudioSession();
SetupAudioBuffersForActiveAudioSession();
- // Audio session has been marked ready for WebRTC so we can initialize the
- // audio unit now.
audio_unit_->Initialize(playout_parameters_.sample_rate());
- return true;
}
// Release the lock.
@@ -694,9 +809,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
LOGI() << "ShutdownPlayOrRecord";
// Close and delete the voice-processing I/O unit.
- if (audio_unit_) {
- audio_unit_.reset();
- }
+ audio_unit_.reset();
// Remove audio session notification observers.
RTCAudioSession* session = [RTCAudioSession sharedInstance];
@@ -705,6 +818,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
// All I/O should be stopped or paused prior to deactivating the audio
// session, hence we deactivate as last action.
[session lockForConfiguration];
+ UnconfigureAudioSession();
[session endWebRTCSession:nil];
[session unlockForConfiguration];
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
index 4dfb073fa9f..ec10119a113 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
@@ -537,7 +537,7 @@ class AudioDeviceTest : public ::testing::Test {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
AudioDeviceModule::AudioLayer audio_layer) {
rtc::scoped_refptr<AudioDeviceModule> module(
- AudioDeviceModuleImpl::Create(0, audio_layer));
+ AudioDeviceModule::Create(0, audio_layer));
return module;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
index 6c4a9cd9e27..def8c2322b0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
@@ -28,8 +28,8 @@ class AudioSessionObserver {
// Called when audio route changes.
virtual void OnValidRouteChange() = 0;
- // Called when audio session has been configured for WebRTC.
- virtual void OnConfiguredForWebRTC() = 0;
+ // Called when the ability to play or record changes.
+ virtual void OnCanPlayOrRecordChange(bool can_play_or_record) = 0;
protected:
virtual ~AudioSessionObserver() {}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
index 06ddddd9bce..5a7600a5d39 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
@@ -10,18 +10,34 @@
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
@implementation RTCAudioSession (Configuration)
-- (BOOL)isConfiguredForWebRTC {
- return self.savedConfiguration != nil;
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:NO
+ shouldSetActive:NO
+ error:outError];
+}
+
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:active
+ shouldSetActive:YES
+ error:outError];
}
+#pragma mark - Private
+
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
active:(BOOL)active
+ shouldSetActive:(BOOL)shouldSetActive
error:(NSError **)outError {
NSParameterAssert(configuration);
if (outError) {
@@ -61,8 +77,22 @@
}
}
- // self.sampleRate is accurate only if the audio session is active.
- if (!self.isActive || self.sampleRate != configuration.sampleRate) {
+ // Sometimes category options don't stick after setting mode.
+ if (self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category options: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category options to: %ld",
+ (long)configuration.categoryOptions);
+ }
+ }
+
+ if (self.preferredSampleRate != configuration.sampleRate) {
NSError *sampleRateError = nil;
if (![self setPreferredSampleRate:configuration.sampleRate
error:&sampleRateError]) {
@@ -75,9 +105,7 @@
}
}
- // self.IOBufferDuration is accurate only if the audio session is active.
- if (!self.isActive ||
- self.IOBufferDuration != configuration.ioBufferDuration) {
+ if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
NSError *bufferDurationError = nil;
if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
error:&bufferDurationError]) {
@@ -90,11 +118,13 @@
}
}
- NSError *activeError = nil;
- if (![self setActive:active error:&activeError]) {
- RTCLogError(@"Failed to setActive to %d: %@",
- active, activeError.localizedDescription);
- error = activeError;
+ if (shouldSetActive) {
+ NSError *activeError = nil;
+ if (![self setActive:active error:&activeError]) {
+ RTCLogError(@"Failed to setActive to %d: %@",
+ active, activeError.localizedDescription);
+ error = activeError;
+ }
}
if (self.isActive &&
@@ -138,84 +168,4 @@
return error == nil;
}
-- (BOOL)configureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Configuring audio session for WebRTC.");
-
- if (self.isConfiguredForWebRTC) {
- RTCLogError(@"Already configured.");
- if (outError) {
- *outError =
- [self configurationErrorWithDescription:@"Already configured."];
- }
- return NO;
- }
-
- // Configure the AVAudioSession and activate it.
- // Provide an error even if there isn't one so we can log it.
- NSError *error = nil;
- RTCAudioSessionConfiguration *currentConfig =
- [RTCAudioSessionConfiguration currentConfiguration];
- RTCAudioSessionConfiguration *webRTCConfig =
- [RTCAudioSessionConfiguration webRTCConfiguration];
- self.savedConfiguration = currentConfig;
- if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
- RTCLogError(@"Failed to set WebRTC audio configuration: %@",
- error.localizedDescription);
- [self unconfigureWebRTCSession:nil];
- if (outError) {
- *outError = error;
- }
- return NO;
- }
-
- // Ensure that the device currently supports audio input.
- // TODO(tkchin): Figure out if this is really necessary.
- if (!self.inputAvailable) {
- RTCLogError(@"No audio input path is available!");
- [self unconfigureWebRTCSession:nil];
- if (outError) {
- *outError = [self configurationErrorWithDescription:@"No input path."];
- }
- return NO;
- }
-
- // Give delegates a chance to process the event. In particular, the audio
- // devices listening to this event will initialize their audio units.
- [self notifyDidConfigure];
-
- return YES;
-}
-
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Unconfiguring audio session for WebRTC.");
-
- if (!self.isConfiguredForWebRTC) {
- RTCLogError(@"Already unconfigured.");
- if (outError) {
- *outError =
- [self configurationErrorWithDescription:@"Already unconfigured."];
- }
- return NO;
- }
-
- [self setConfiguration:self.savedConfiguration active:NO error:outError];
- self.savedConfiguration = nil;
-
- [self notifyDidUnconfigure];
-
- return YES;
-}
-
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
index c6738e7a033..cb506c345a9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
@@ -28,11 +28,8 @@ NS_ASSUME_NONNULL_BEGIN
*/
@property(nonatomic, readonly) int webRTCSessionCount;
-/** The configuration of the audio session before configureWebRTCSession
- * was first called.
- */
-@property(nonatomic, strong, nullable)
- RTCAudioSessionConfiguration *savedConfiguration;
+/** Convenience BOOL that checks useManualAudio and isAudioEnebled. */
+@property(readonly) BOOL canPlayOrRecord;
- (BOOL)checkLock:(NSError **)outError;
@@ -55,6 +52,22 @@ NS_ASSUME_NONNULL_BEGIN
*/
- (BOOL)endWebRTCSession:(NSError **)outError;
+/** Configure the audio session for WebRTC. This call will fail if the session
+ * is already configured. On other failures, we will attempt to restore the
+ * previously used audio session configuration.
+ * |lockForConfiguration| must be called first.
+ * Successful calls to configureWebRTCSession must be matched by calls to
+ * |unconfigureWebRTCSession|.
+ */
+- (BOOL)configureWebRTCSession:(NSError **)outError;
+
+/** Unconfigures the session for WebRTC. This will attempt to restore the
+ * audio session to the settings used before |configureWebRTCSession| was
+ * called.
+ * |lockForConfiguration| must be called first.
+ */
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+
/** Returns a configuration error with the given description. */
- (NSError *)configurationErrorWithDescription:(NSString *)description;
@@ -69,10 +82,9 @@ NS_ASSUME_NONNULL_BEGIN
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
-- (void)notifyShouldConfigure;
-- (void)notifyShouldUnconfigure;
-- (void)notifyDidConfigure;
-- (void)notifyDidUnconfigure;
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+- (void)notifyDidStartPlayOrRecord;
+- (void)notifyDidStopPlayOrRecord;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
index ab8dbc8859e..274cc2bc978 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
@@ -58,29 +58,18 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
-/** Called on a WebRTC thread when WebRTC needs to take over audio. Applications
- * should call -[RTCAudioSession configureWebRTCSession] to allow WebRTC to
- * play and record audio. Will only occur if shouldDelayAudioConfiguration is
- * set to YES.
- */
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session;
-
-/** Called on a WebRTC thread when WebRTC no longer requires audio. Applications
- * should call -[RTCAudioSession unconfigureWebRTCSession] to restore their
- * audio session settings. Will only occur if shouldDelayAudioConfiguration is
- * set to YES.
- */
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session;
+- (void)audioSession:(RTCAudioSession *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
-/** Called on a WebRTC thread when WebRTC has configured the audio session for
- * WebRTC audio.
+/** Called on a WebRTC thread when the audio device is notified to begin
+ * playback or recording.
*/
-- (void)audioSessionDidConfigure:(RTCAudioSession *)session;
+- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session;
-/** Called on a WebRTC thread when WebRTC has unconfigured the audio session for
- * WebRTC audio.
+/** Called on a WebRTC thread when the audio device is notified to stop
+ * playback or recording.
*/
-- (void)audioSessionDidUnconfigure:(RTCAudioSession *)session;
+- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session;
@end
@@ -108,11 +97,24 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
/** If YES, WebRTC will not initialize the audio unit automatically when an
* audio track is ready for playout or recording. Instead, applications should
- * listen to the delegate method |audioSessionShouldConfigure| and configure
- * the session manually. This should be set before making WebRTC media calls
- * and should not be changed while a call is active.
+ * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit
+ * as soon as an audio track is ready for playout or recording.
*/
-@property(nonatomic, assign) BOOL shouldDelayAudioConfiguration;
+@property(nonatomic, assign) BOOL useManualAudio;
+
+/** This property is only effective if useManualAudio is YES.
+ * Represents permission for WebRTC to initialize the VoIP audio unit.
+ * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be
+ * stopped and uninitialized. This will stop incoming and outgoing audio.
+ * When set to YES, WebRTC will initialize and start the audio unit when it is
+ * needed (e.g. due to establishing an audio connection).
+ * This property was introduced to work around an issue where if an AVPlayer is
+ * playing audio while the VoIP audio unit is initialized, its audio would be
+ * either cut off completely or played at a reduced volume. By preventing
+ * the audio unit from being initialized until after the audio has completed,
+ * we are able to prevent the abrupt cutoff.
+ */
+@property(nonatomic, assign) BOOL isAudioEnabled;
// Proxy properties.
@property(readonly) NSString *category;
@@ -134,12 +136,14 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *outputDataSource;
@property(readonly) double sampleRate;
+@property(readonly) double preferredSampleRate;
@property(readonly) NSInteger inputNumberOfChannels;
@property(readonly) NSInteger outputNumberOfChannels;
@property(readonly) float outputVolume;
@property(readonly) NSTimeInterval inputLatency;
@property(readonly) NSTimeInterval outputLatency;
@property(readonly) NSTimeInterval IOBufferDuration;
+@property(readonly) NSTimeInterval preferredIOBufferDuration;
/** Default constructor. */
+ (instancetype)sharedInstance;
@@ -196,36 +200,20 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
@interface RTCAudioSession (Configuration)
-/** Whether or not |configureWebRTCSession| has been called without a balanced
- * call to |unconfigureWebRTCSession|. This is not an indication of whether the
- * audio session has the right settings.
- */
-@property(readonly) BOOL isConfiguredForWebRTC;
-
/** Applies the configuration to the current session. Attempts to set all
* properties even if previous ones fail. Only the last error will be
- * returned. Also calls setActive with |active|.
+ * returned.
* |lockForConfiguration| must be called first.
*/
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- active:(BOOL)active
error:(NSError **)outError;
-/** Configure the audio session for WebRTC. This call will fail if the session
- * is already configured. On other failures, we will attempt to restore the
- * previously used audio session configuration.
- * |lockForConfiguration| must be called first.
- * Successful calls to configureWebRTCSession must be matched by calls to
- * |unconfigureWebRTCSession|.
- */
-- (BOOL)configureWebRTCSession:(NSError **)outError;
-
-/** Unconfigures the session for WebRTC. This will attempt to restore the
- * audio session to the settings used before |configureWebRTCSession| was
- * called.
+/** Convenience method that calls both setConfiguration and setActive.
* |lockForConfiguration| must be called first.
*/
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
index c6e3677b846..7ef5110f476 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
@@ -15,8 +15,9 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/audio_device/ios/audio_device_ios.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
+#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
@@ -32,12 +33,13 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
volatile int _lockRecursionCount;
volatile int _webRTCSessionCount;
BOOL _isActive;
- BOOL _shouldDelayAudioConfiguration;
+ BOOL _useManualAudio;
+ BOOL _isAudioEnabled;
+ BOOL _canPlayOrRecord;
}
@synthesize session = _session;
@synthesize delegates = _delegates;
-@synthesize savedConfiguration = _savedConfiguration;
+ (instancetype)sharedInstance {
static dispatch_once_t onceToken;
@@ -81,6 +83,9 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (NSString *)description {
NSString *format =
@"RTCAudioSession: {\n"
+ " category: %@\n"
+ " categoryOptions: %ld\n"
+ " mode: %@\n"
" isActive: %d\n"
" sampleRate: %.2f\n"
" IOBufferDuration: %f\n"
@@ -90,6 +95,7 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
" inputLatency: %f\n"
"}";
NSString *description = [NSString stringWithFormat:format,
+ self.category, (long)self.categoryOptions, self.mode,
self.isActive, self.sampleRate, self.IOBufferDuration,
self.outputNumberOfChannels, self.inputNumberOfChannels,
self.outputLatency, self.inputLatency];
@@ -112,20 +118,35 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return _lockRecursionCount > 0;
}
-- (void)setShouldDelayAudioConfiguration:(BOOL)shouldDelayAudioConfiguration {
+- (void)setUseManualAudio:(BOOL)useManualAudio {
@synchronized(self) {
- // No one should be changing this while an audio device is active.
- RTC_DCHECK(!self.isConfiguredForWebRTC);
- if (_shouldDelayAudioConfiguration == shouldDelayAudioConfiguration) {
+ if (_useManualAudio == useManualAudio) {
return;
}
- _shouldDelayAudioConfiguration = shouldDelayAudioConfiguration;
+ _useManualAudio = useManualAudio;
}
+ [self updateCanPlayOrRecord];
}
-- (BOOL)shouldDelayAudioConfiguration {
+- (BOOL)useManualAudio {
@synchronized(self) {
- return _shouldDelayAudioConfiguration;
+ return _useManualAudio;
+ }
+}
+
+- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
+ @synchronized(self) {
+ if (_isAudioEnabled == isAudioEnabled) {
+ return;
+ }
+ _isAudioEnabled = isAudioEnabled;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)isAudioEnabled {
+ @synchronized(self) {
+ return _isAudioEnabled;
}
}
@@ -147,7 +168,8 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
@synchronized(self) {
_delegates.erase(std::remove(_delegates.begin(),
_delegates.end(),
- delegate));
+ delegate),
+ _delegates.end());
[self removeZeroedDelegates];
}
}
@@ -231,6 +253,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return self.session.sampleRate;
}
+- (double)preferredSampleRate {
+ return self.session.preferredSampleRate;
+}
+
- (NSInteger)inputNumberOfChannels {
return self.session.inputNumberOfChannels;
}
@@ -255,6 +281,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return self.session.IOBufferDuration;
}
+- (NSTimeInterval)preferredIOBufferDuration {
+ return self.session.preferredIOBufferDuration;
+}
+
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
@@ -496,21 +526,6 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
-- (void)setSavedConfiguration:(RTCAudioSessionConfiguration *)configuration {
- @synchronized(self) {
- if (_savedConfiguration == configuration) {
- return;
- }
- _savedConfiguration = configuration;
- }
-}
-
-- (RTCAudioSessionConfiguration *)savedConfiguration {
- @synchronized(self) {
- return _savedConfiguration;
- }
-}
-
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
@synchronized(self) {
@@ -520,11 +535,11 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (void)removeZeroedDelegates {
@synchronized(self) {
- for (auto it = _delegates.begin(); it != _delegates.end(); ++it) {
- if (!*it) {
- _delegates.erase(it);
- }
- }
+ _delegates.erase(
+ std::remove_if(_delegates.begin(),
+ _delegates.end(),
+ [](id delegate) -> bool { return delegate == nil; }),
+ _delegates.end());
}
}
@@ -546,6 +561,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return _webRTCSessionCount;
}
+- (BOOL)canPlayOrRecord {
+ return !self.useManualAudio || self.isAudioEnabled;
+}
+
- (BOOL)checkLock:(NSError **)outError {
// Check ivar instead of trying to acquire lock so that we won't accidentally
// acquire lock if it hasn't already been called.
@@ -565,79 +584,70 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
if (![self checkLock:outError]) {
return NO;
}
- NSInteger sessionCount = rtc::AtomicOps::Increment(&_webRTCSessionCount);
- if (sessionCount > 1) {
- // Should already be configured.
- RTC_DCHECK(self.isConfiguredForWebRTC);
- return YES;
- }
+ rtc::AtomicOps::Increment(&_webRTCSessionCount);
+ [self notifyDidStartPlayOrRecord];
+ return YES;
+}
- // Only perform configuration steps once. Application might have already
- // configured the session.
- if (self.isConfiguredForWebRTC) {
- // Nothing more to do, already configured.
- return YES;
+- (BOOL)endWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
}
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ rtc::AtomicOps::Decrement(&_webRTCSessionCount);
+ [self notifyDidStopPlayOrRecord];
+ return YES;
+}
- // If application has prevented automatic configuration, return here and wait
- // for application to call configureWebRTCSession.
- if (self.shouldDelayAudioConfiguration) {
- [self notifyShouldConfigure];
- return YES;
+- (BOOL)configureWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ if (![self checkLock:outError]) {
+ return NO;
}
+ RTCLog(@"Configuring audio session for WebRTC.");
- // Configure audio session.
+ // Configure the AVAudioSession and activate it.
+ // Provide an error even if there isn't one so we can log it.
NSError *error = nil;
- if (![self configureWebRTCSession:&error]) {
- RTCLogError(@"Error configuring audio session: %@",
+ RTCAudioSessionConfiguration *webRTCConfig =
+ [RTCAudioSessionConfiguration webRTCConfiguration];
+ if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
+ RTCLogError(@"Failed to set WebRTC audio configuration: %@",
error.localizedDescription);
+ [self unconfigureWebRTCSession:nil];
if (outError) {
*outError = error;
}
return NO;
}
+ // Ensure that the device currently supports audio input.
+ // TODO(tkchin): Figure out if this is really necessary.
+ if (!self.inputAvailable) {
+ RTCLogError(@"No audio input path is available!");
+ [self unconfigureWebRTCSession:nil];
+ if (outError) {
+ *outError = [self configurationErrorWithDescription:@"No input path."];
+ }
+ return NO;
+ }
+
return YES;
}
-- (BOOL)endWebRTCSession:(NSError **)outError {
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
- int sessionCount = rtc::AtomicOps::Decrement(&_webRTCSessionCount);
- RTC_DCHECK_GE(sessionCount, 0);
- if (sessionCount != 0) {
- // Should still be configured.
- RTC_DCHECK(self.isConfiguredForWebRTC);
- return YES;
- }
-
- // Only unconfigure if application has not done it.
- if (!self.isConfiguredForWebRTC) {
- // Nothing more to do, already unconfigured.
- return YES;
- }
-
- // If application has prevented automatic configuration, return here and wait
- // for application to call unconfigureWebRTCSession.
- if (self.shouldDelayAudioConfiguration) {
- [self notifyShouldUnconfigure];
- return YES;
- }
-
- // Unconfigure audio session.
- NSError *error = nil;
- if (![self unconfigureWebRTCSession:&error]) {
- RTCLogError(@"Error unconfiguring audio session: %@",
- error.localizedDescription);
- if (outError) {
- *outError = error;
- }
- return NO;
- }
+ RTCLog(@"Unconfiguring audio session for WebRTC.");
+ [self setActive:NO error:outError];
return YES;
}
@@ -666,6 +676,22 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
+- (void)updateCanPlayOrRecord {
+ BOOL canPlayOrRecord = NO;
+ BOOL shouldNotify = NO;
+ @synchronized(self) {
+ canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
+ if (_canPlayOrRecord == canPlayOrRecord) {
+ return;
+ }
+ _canPlayOrRecord = canPlayOrRecord;
+ shouldNotify = YES;
+ }
+ if (shouldNotify) {
+ [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
+ }
+}
+
- (void)notifyDidBeginInterruption {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidBeginInterruption:);
@@ -716,38 +742,29 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
-- (void)notifyShouldConfigure {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionShouldConfigure:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionShouldConfigure:self];
- }
- }
-}
-
-- (void)notifyShouldUnconfigure {
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionShouldUnconfigure:);
+ SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionShouldUnconfigure:self];
+ [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
}
}
}
-- (void)notifyDidConfigure {
+- (void)notifyDidStartPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidConfigure:);
+ SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidConfigure:self];
+ [delegate audioSessionDidStartPlayOrRecord:self];
}
}
}
-- (void)notifyDidUnconfigure {
+- (void)notifyDidStopPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidUnconfigure:);
+ SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidUnconfigure:self];
+ [delegate audioSessionDidStopPlayOrRecord:self];
}
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
index 4273d4d8634..7832a82d4f3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
@@ -37,6 +37,8 @@ extern const double kRTCAudioSessionLowComplexityIOBufferDuration;
+ (instancetype)currentConfiguration;
/** Returns the configuration that WebRTC needs. */
+ (instancetype)webRTCConfiguration;
+/** Provide a way to override the default configuration. */
++ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
index 086725172bd..d2d04835499 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
@@ -10,6 +10,8 @@
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
+#import "WebRTC/RTCDispatcher.h"
+
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
// Try to use mono to save resources. Also avoids channel format conversion
@@ -49,6 +51,8 @@ const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.01;
// TODO(henrika): monitor this size and determine if it should be modified.
const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
+static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
+
@implementation RTCAudioSessionConfiguration
@synthesize category = _category;
@@ -96,6 +100,10 @@ const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
return self;
}
++ (void)initialize {
+ gWebRTCConfiguration = [[self alloc] init];
+}
+
+ (instancetype)currentConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config =
@@ -111,7 +119,15 @@ const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
}
+ (instancetype)webRTCConfiguration {
- return [[self alloc] init];
+ @synchronized(self) {
+ return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
+ }
+}
+
++ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
+ @synchronized(self) {
+ gWebRTCConfiguration = configuration;
+ }
}
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
index 21e8c3e84b1..b554e51ece0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
@@ -12,7 +12,7 @@
#include "webrtc/modules/audio_device/ios/audio_session_observer.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
@implementation RTCAudioSessionDelegateAdapter {
webrtc::AudioSessionObserver *_observer;
@@ -70,14 +70,15 @@
- (void)audioSessionMediaServicesWereReset:(RTCAudioSession *)session {
}
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
+- (void)audioSession:(RTCAudioSession *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+ _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
}
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
+- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
}
-- (void)audioSessionDidConfigure:(RTCAudioSession *)session {
- _observer->OnConfiguredForWebRTC();
+- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
}
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
index 603e450c758..7cbd2a982a9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
@@ -46,6 +46,28 @@
@end
+// A delegate that adds itself to the audio session on init and removes itself
+// in its dealloc.
+@interface RTCTestRemoveOnDeallocDelegate : RTCAudioSessionTestDelegate
+@end
+
+@implementation RTCTestRemoveOnDeallocDelegate
+
+- (instancetype)init {
+ if (self = [super init]) {
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ [session addDelegate:self];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ [session removeDelegate:self];
+}
+
+@end
+
@interface RTCAudioSessionTest : NSObject
@@ -142,6 +164,18 @@
EXPECT_TRUE(session.delegates[0]);
}
+// Tests that we don't crash when removing delegates in dealloc.
+// Added as a regression test.
+- (void)testRemoveDelegateOnDealloc {
+ @autoreleasepool {
+ RTCTestRemoveOnDeallocDelegate *delegate =
+ [[RTCTestRemoveOnDeallocDelegate alloc] init];
+ EXPECT_TRUE(delegate);
+ }
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
@end
namespace webrtc {
@@ -176,4 +210,9 @@ TEST_F(AudioSessionTest, ZeroingWeakDelegate) {
[test testZeroingWeakDelegate];
}
+TEST_F(AudioSessionTest, RemoveDelegateOnDealloc) {
+ RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
+ [test testRemoveDelegateOnDealloc];
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm b/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
index db756a4972c..db7f42edcb9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
@@ -12,7 +12,7 @@
#include "webrtc/base/checks.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
#if !defined(NDEBUG)
@@ -175,7 +175,7 @@ VoiceProcessingAudioUnit::State VoiceProcessingAudioUnit::GetState() const {
bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
RTC_DCHECK_GE(state_, kUninitialized);
- RTCLog(@"Initializing audio unit.");
+ RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate);
OSStatus result = noErr;
AudioStreamBasicDescription format = GetFormat(sample_rate);
@@ -228,7 +228,9 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
[NSThread sleepForTimeInterval:0.1f];
result = AudioUnitInitialize(vpio_unit_);
}
- RTCLog(@"Voice Processing I/O unit is now initialized.");
+ if (result == noErr) {
+ RTCLog(@"Voice Processing I/O unit is now initialized.");
+ }
state_ = kInitialized;
return true;
}
@@ -241,6 +243,8 @@ bool VoiceProcessingAudioUnit::Start() {
if (result != noErr) {
RTCLogError(@"Failed to start audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Started audio unit");
}
state_ = kStarted;
return true;
@@ -254,7 +258,10 @@ bool VoiceProcessingAudioUnit::Stop() {
if (result != noErr) {
RTCLogError(@"Failed to stop audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Stopped audio unit");
}
+
state_ = kInitialized;
return true;
}
@@ -267,7 +274,11 @@ bool VoiceProcessingAudioUnit::Uninitialize() {
if (result != noErr) {
RTCLogError(@"Failed to uninitialize audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Uninitialized audio unit.");
}
+
+ state_ = kUninitialized;
return true;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc b/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
index f37d89cd9ca..dad42a0c0b3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
@@ -142,8 +142,7 @@ class AudioDeviceAPITest: public testing::Test {
virtual ~AudioDeviceAPITest() {}
static void SetUpTestCase() {
- process_thread_ =
- rtc::ScopedToUnique(ProcessThread::Create("ProcessThread"));
+ process_thread_ = ProcessThread::Create("ProcessThread");
process_thread_->Start();
// Windows:
@@ -154,75 +153,75 @@ class AudioDeviceAPITest: public testing::Test {
const int32_t kId = 444;
#if defined(_WIN32)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined!\n\n");
// create default implementation (=Core Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// create non-default (=Wave Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Core Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) != NULL);
#else
TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined!\n");
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
// create default implementation (=Wave Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Wave Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
#endif
#endif
#if defined(ANDROID)
// Fails tests
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
// Create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
#elif defined(WEBRTC_LINUX)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
// create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_->Terminate());
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Pulse Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) != NULL);
#endif
#if defined(WEBRTC_MAC)
// Fails tests
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
// Create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc b/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
index bb7686c6c18..f16f296011b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
@@ -594,16 +594,15 @@ FuncTestManager::~FuncTestManager()
int32_t FuncTestManager::Init()
{
- EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
- ProcessThread::Create("ProcessThread"))) != NULL);
- if (_processThread == NULL)
- {
- return -1;
+ EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+ NULL);
+ if (_processThread == NULL) {
+ return -1;
}
_processThread->Start();
// create the Audio Device module
- EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((_audioDevice = AudioDeviceModule::Create(
555, ADM_AUDIO_LAYER)) != NULL);
if (_audioDevice == NULL)
{
@@ -832,8 +831,8 @@ int32_t FuncTestManager::TestAudioLayerSelection()
// ==================================================
// Next, try to make fresh start with new audio layer
- EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
- ProcessThread::Create("ProcessThread"))) != NULL);
+ EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+ NULL);
if (_processThread == NULL)
{
return -1;
@@ -843,12 +842,12 @@ int32_t FuncTestManager::TestAudioLayerSelection()
// create the Audio Device module based on selected audio layer
if (tryWinWave)
{
- _audioDevice = AudioDeviceModuleImpl::Create(
+ _audioDevice = AudioDeviceModule::Create(
555,
AudioDeviceModule::kWindowsWaveAudio);
} else if (tryWinCore)
{
- _audioDevice = AudioDeviceModuleImpl::Create(
+ _audioDevice = AudioDeviceModule::Create(
555,
AudioDeviceModule::kWindowsCoreAudio);
}
@@ -857,7 +856,7 @@ int32_t FuncTestManager::TestAudioLayerSelection()
{
TEST_LOG("\nERROR: Switch of audio layer failed!\n");
// restore default audio layer instead
- EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((_audioDevice = AudioDeviceModule::Create(
555, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
index 8079051184f..c1497089926 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/win/audio_device_wave_win.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include <windows.h>
@@ -206,7 +206,7 @@ int32_t AudioDeviceWindowsWave::Init()
return 0;
}
- const uint32_t nowTime(TickTime::MillisecondTimestamp());
+ const uint32_t nowTime(rtc::TimeMillis());
_recordedBytes = 0;
_prevRecByteCheckTime = nowTime;
@@ -3038,7 +3038,7 @@ bool AudioDeviceWindowsWave::ThreadProcess()
return true;
}
- time = TickTime::MillisecondTimestamp();
+ time = rtc::TimeMillis();
if (_startPlay)
{
diff --git a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
index 22c904d52f2..a9650f74296 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
@@ -24,17 +24,16 @@ source_set("audio_processing") {
sources = [
"aec/aec_core.cc",
"aec/aec_core.h",
- "aec/aec_core_internal.h",
- "aec/aec_rdft.c",
+ "aec/aec_core_optimized_methods.h",
+ "aec/aec_rdft.cc",
"aec/aec_rdft.h",
"aec/aec_resampler.cc",
"aec/aec_resampler.h",
"aec/echo_cancellation.cc",
"aec/echo_cancellation.h",
- "aec/echo_cancellation_internal.h",
- "aecm/aecm_core.c",
+ "aecm/aecm_core.cc",
"aecm/aecm_core.h",
- "aecm/echo_control_mobile.c",
+ "aecm/echo_control_mobile.cc",
"aecm/echo_control_mobile.h",
"agc/agc.cc",
"agc/agc.h",
@@ -81,9 +80,8 @@ source_set("audio_processing") {
"intelligibility/intelligibility_utils.h",
"level_estimator_impl.cc",
"level_estimator_impl.h",
- "logging/aec_logging.h",
- "logging/aec_logging_file_handling.cc",
- "logging/aec_logging_file_handling.h",
+ "logging/apm_data_dumper.cc",
+ "logging/apm_data_dumper.h",
"noise_suppression_impl.cc",
"noise_suppression_impl.h",
"render_queue_item_verifier.h",
@@ -110,10 +108,10 @@ source_set("audio_processing") {
"typing_detection.h",
"utility/block_mean_calculator.cc",
"utility/block_mean_calculator.h",
- "utility/delay_estimator.c",
+ "utility/delay_estimator.cc",
"utility/delay_estimator.h",
"utility/delay_estimator_internal.h",
- "utility/delay_estimator_wrapper.c",
+ "utility/delay_estimator_wrapper.cc",
"utility/delay_estimator_wrapper.h",
"vad/common.h",
"vad/gmm.cc",
@@ -149,7 +147,9 @@ source_set("audio_processing") {
]
if (aec_debug_dump) {
- defines += [ "WEBRTC_AEC_DEBUG_DUMP" ]
+ defines += [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines += [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
}
if (aec_untrusted_delay_for_testing) {
@@ -196,15 +196,15 @@ source_set("audio_processing") {
}
if (current_cpu == "mipsel") {
- sources += [ "aecm/aecm_core_mips.c" ]
+ sources += [ "aecm/aecm_core_mips.cc" ]
if (mips_float_abi == "hard") {
sources += [
"aec/aec_core_mips.cc",
- "aec/aec_rdft_mips.c",
+ "aec/aec_rdft_mips.cc",
]
}
} else {
- sources += [ "aecm/aecm_core_c.c" ]
+ sources += [ "aecm/aecm_core_c.cc" ]
}
if (is_win) {
@@ -241,7 +241,7 @@ if (current_cpu == "x86" || current_cpu == "x64") {
source_set("audio_processing_sse2") {
sources = [
"aec/aec_core_sse2.cc",
- "aec/aec_rdft_sse2.c",
+ "aec/aec_rdft_sse2.cc",
]
if (is_posix) {
@@ -250,6 +250,12 @@ if (current_cpu == "x86" || current_cpu == "x64") {
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
+
+ if (aec_debug_dump) {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
+ }
}
}
@@ -257,8 +263,8 @@ if (rtc_build_with_neon) {
source_set("audio_processing_neon") {
sources = [
"aec/aec_core_neon.cc",
- "aec/aec_rdft_neon.c",
- "aecm/aecm_core_neon.c",
+ "aec/aec_rdft_neon.cc",
+ "aecm/aecm_core_neon.cc",
"ns/nsx_core_neon.c",
]
@@ -285,5 +291,11 @@ if (rtc_build_with_neon) {
deps = [
"../../common_audio",
]
+
+ if (aec_debug_dump) {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
+ }
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
index 1d1e67636bf..4c109d7d9ce 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
@@ -14,10 +14,6 @@
#include "webrtc/modules/audio_processing/aec/aec_core.h"
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-#include <stdio.h>
-#endif
-
#include <algorithm>
#include <assert.h>
#include <math.h>
@@ -29,20 +25,45 @@
extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
}
+#include "webrtc/base/checks.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
-#include "webrtc/modules/audio_processing/logging/aec_logging.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-}
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+namespace {
+enum class DelaySource {
+ kSystemDelay, // The delay values come from the OS.
+ kDelayAgnostic, // The delay values come from the DA-AEC.
+};
+
+constexpr int kMinDelayLogValue = -200;
+constexpr int kMaxDelayLogValue = 200;
+constexpr int kNumDelayLogBuckets = 100;
+
+void MaybeLogDelayAdjustment(int moved_ms, DelaySource source) {
+ if (moved_ms == 0)
+ return;
+ switch (source) {
+ case DelaySource::kSystemDelay:
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AecDelayAdjustmentMsSystemValue",
+ moved_ms, kMinDelayLogValue, kMaxDelayLogValue,
+ kNumDelayLogBuckets);
+ return;
+ case DelaySource::kDelayAgnostic:
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AecDelayAdjustmentMsAgnosticValue",
+ moved_ms, kMinDelayLogValue, kMaxDelayLogValue,
+ kNumDelayLogBuckets);
+ return;
+ }
+}
+} // namespace
// Buffer size (samples)
static const size_t kBufSizePartitions = 250; // 1 second of audio in 16 kHz.
@@ -55,8 +76,8 @@ static const int kDelayMetricsAggregationWindow = 1250; // 5 seconds at 16 kHz.
// Divergence metric is based on audio level, which gets updated every
// |kCountLen + 1| * 10 milliseconds. Divergence metric takes the statistics of
// |kDivergentFilterFractionAggregationWindowSize| samples. Current value
-// corresponds to 0.5 seconds at 16 kHz.
-static const int kDivergentFilterFractionAggregationWindowSize = 25;
+// corresponds to 1 second at 16 kHz.
+static const int kDivergentFilterFractionAggregationWindowSize = 50;
// Quantities to control H band scaling for SWB input
static const float cnScaleHband = 0.4f; // scale for comfort noise in H band.
@@ -136,16 +157,13 @@ const float WebRtcAec_kNormalSmoothingCoefficients[2][2] = {{0.9f, 0.1f},
// Number of partitions forming the NLP's "preferred" bands.
enum { kPrefBandSize = 24 };
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-extern int webrtc_aec_instance_count;
-#endif
-
WebRtcAecFilterFar WebRtcAec_FilterFar;
WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
-WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
-WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
-WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
+WebRtcAecOverdrive WebRtcAec_Overdrive;
+WebRtcAecSuppress WebRtcAec_Suppress;
+WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
WebRtcAecWindowData WebRtcAec_WindowData;
@@ -210,7 +228,10 @@ void DivergentFilterFraction::Clear() {
}
// TODO(minyue): Moving some initialization from WebRtcAec_CreateAec() to ctor.
-AecCore::AecCore() = default;
+AecCore::AecCore(int instance_index)
+ : data_dumper(new ApmDataDumper(instance_index)) {}
+
+AecCore::~AecCore() {}
static int CmpFloat(const void* a, const void* b) {
const float* da = (const float*)a;
@@ -316,19 +337,21 @@ static void FilterAdaptation(
}
}
-static void OverdriveAndSuppress(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
- int i;
- for (i = 0; i < PART_LEN1; i++) {
+static void Overdrive(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Weight subbands
if (hNl[i] > hNlFb) {
hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+static void Suppress(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Suppress error signal
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -339,7 +362,9 @@ static void OverdriveAndSuppress(AecCore* aec,
}
}
-static int PartitionDelay(const AecCore* aec) {
+static int PartitionDelay(int num_partitions,
+ float h_fft_buf[2]
+ [kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -348,13 +373,13 @@ static int PartitionDelay(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
for (j = 0; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -365,10 +390,46 @@ static int PartitionDelay(const AecCore* aec) {
return delay;
}
+// Update metric with 10 * log10(numerator / denominator).
+static void UpdateLogRatioMetric(Stats* metric, float numerator,
+ float denominator) {
+ RTC_DCHECK(metric);
+ RTC_CHECK(numerator >= 0);
+ RTC_CHECK(denominator >= 0);
+
+ const float log_numerator = log10(numerator + 1e-10f);
+ const float log_denominator = log10(denominator + 1e-10f);
+ metric->instant = 10.0f * (log_numerator - log_denominator);
+
+ // Max.
+ if (metric->instant > metric->max)
+ metric->max = metric->instant;
+
+ // Min.
+ if (metric->instant < metric->min)
+ metric->min = metric->instant;
+
+ // Average.
+ metric->counter++;
+  // This is to protect against overflow, which should almost never happen.
+ RTC_CHECK_NE(0u, metric->counter);
+ metric->sum += metric->instant;
+ metric->average = metric->sum / metric->counter;
+
+ // Upper mean.
+ if (metric->instant > metric->average) {
+ metric->hicounter++;
+      // This is to protect against overflow, which should almost never happen.
+ RTC_CHECK_NE(0u, metric->hicounter);
+ metric->hisum += metric->instant;
+ metric->himean = metric->hisum / metric->hicounter;
+ }
+}
+
// Threshold to protect against the ill-effects of a zero far-end.
const float WebRtcAec_kMinFarendPSD = 15;
-// Updates the following smoothed Power Spectral Densities (PSD):
+// Updates the following smoothed Power Spectral Densities (PSD):
// - sd : near-end
// - se : residual echo
// - sx : far-end
@@ -377,53 +438,60 @@ const float WebRtcAec_kMinFarendPSD = 15;
//
// In addition to updating the PSDs, also the filter diverge state is
// determined.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectra(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
for (i = 0; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -454,26 +522,17 @@ __inline static void StoreAsComplex(const float* data,
data_complex[1][PART_LEN] = 0;
}
-static void SubbandCoherence(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherence(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
- int i;
-
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
+ float* cohxd) {
// Subband coherence
- for (i = 0; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ for (int i = 0; i < PART_LEN1; i++) {
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
@@ -487,94 +546,67 @@ static void GetHighbandGain(const float* lambda, float* nlpGainHband) {
*nlpGainHband /= static_cast<float>(PART_LEN1 - 1 - freqAvgIc);
}
-static void ComfortNoise(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda) {
- int i, num;
- float rand[PART_LEN];
- float noise, noiseAvg, tmp, tmpAvg;
+static void GenerateComplexNoise(uint32_t* seed, float noise[2][PART_LEN1]) {
+ const float kPi2 = 6.28318530717959f;
int16_t randW16[PART_LEN];
- float u[2][PART_LEN1];
+ WebRtcSpl_RandUArray(randW16, PART_LEN, seed);
- const float pi2 = 6.28318530717959f;
-
- // Generate a uniform random array on [0 1]
- WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
- for (i = 0; i < PART_LEN; i++) {
- rand[i] = static_cast<float>(randW16[i]) / 32768;
+ noise[0][0] = 0;
+ noise[1][0] = 0;
+ for (size_t i = 1; i < PART_LEN1; i++) {
+ float tmp = kPi2 * randW16[i - 1] / 32768.f;
+ noise[0][i] = cosf(tmp);
+ noise[1][i] = -sinf(tmp);
}
+ noise[1][PART_LEN] = 0;
+}
- // Reject LF noise
- u[0][0] = 0;
- u[1][0] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- noise = sqrtf(noisePow[i]);
- u[0][i] = noise * cosf(tmp);
- u[1][i] = -noise * sinf(tmp);
- }
- u[1][PART_LEN] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // This is the proper weighting to match the background noise power
- tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- // tmp = 1 - lambda[i];
- efw[0][i] += tmp * u[0][i];
- efw[1][i] += tmp * u[1][i];
- }
-
- // For H band comfort noise
- // TODO(peah): don't compute noise and "tmp" twice. Use the previous results.
- noiseAvg = 0.0;
- tmpAvg = 0.0;
- num = 0;
- if (aec->num_bands > 1) {
- // average noise scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- noiseAvg += sqrtf(noisePow[i]);
- }
- noiseAvg /= static_cast<float>(num);
-
- // average nlp scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements
- // we're summing.
- num = 0;
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- }
- tmpAvg /= static_cast<float>(num);
-
- // Use average noise for H band
- // TODO(peah): we should probably have a new random vector here.
- // Reject LF noise
- u[0][0] = 0;
- u[1][0] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- // Use average noise for H band
- u[0][i] = noiseAvg * static_cast<float>(cos(tmp));
- u[1][i] = -noiseAvg * static_cast<float>(sin(tmp));
+static void ComfortNoise(bool generate_high_frequency_noise,
+ uint32_t* seed,
+ float e_fft[2][PART_LEN1],
+ float high_frequency_comfort_noise[2][PART_LEN1],
+ const float* noise_spectrum,
+ const float* suppressor_gain) {
+ float complex_noise[2][PART_LEN1];
+
+ GenerateComplexNoise(seed, complex_noise);
+
+ // Shape, scale and add comfort noise.
+ for (int i = 1; i < PART_LEN1; ++i) {
+ float noise_scaling =
+ sqrtf(WEBRTC_SPL_MAX(1 - suppressor_gain[i] * suppressor_gain[i], 0)) *
+ sqrtf(noise_spectrum[i]);
+ e_fft[0][i] += noise_scaling * complex_noise[0][i];
+ e_fft[1][i] += noise_scaling * complex_noise[1][i];
+ }
+
+ // Form comfort noise for higher frequencies.
+ if (generate_high_frequency_noise) {
+ // Compute average noise power and nlp gain over the second half of freq
+ // spectrum (i.e., 4->8khz).
+ int start_avg_band = PART_LEN1 / 2;
+ float upper_bands_noise_power = 0.f;
+ float upper_bands_suppressor_gain = 0.f;
+ for (int i = start_avg_band; i < PART_LEN1; ++i) {
+ upper_bands_noise_power += sqrtf(noise_spectrum[i]);
+ upper_bands_suppressor_gain +=
+ sqrtf(WEBRTC_SPL_MAX(1 - suppressor_gain[i] * suppressor_gain[i], 0));
}
- u[1][PART_LEN] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // Use average NLP weight for H band
- comfortNoiseHband[0][i] = tmpAvg * u[0][i];
- comfortNoiseHband[1][i] = tmpAvg * u[1][i];
+ upper_bands_noise_power /= (PART_LEN1 - start_avg_band);
+ upper_bands_suppressor_gain /= (PART_LEN1 - start_avg_band);
+
+ // Shape, scale and add comfort noise.
+ float noise_scaling = upper_bands_suppressor_gain * upper_bands_noise_power;
+ high_frequency_comfort_noise[0][0] = 0;
+ high_frequency_comfort_noise[1][0] = 0;
+ for (int i = 1; i < PART_LEN1; ++i) {
+ high_frequency_comfort_noise[0][i] = noise_scaling * complex_noise[0][i];
+ high_frequency_comfort_noise[1][i] = noise_scaling * complex_noise[1][i];
}
+ high_frequency_comfort_noise[1][PART_LEN] = 0;
} else {
- memset(comfortNoiseHband, 0,
- 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
+ memset(high_frequency_comfort_noise, 0,
+ 2 * PART_LEN1 * sizeof(high_frequency_comfort_noise[0][0]));
}
}
@@ -638,16 +670,12 @@ static void UpdateLevel(PowerLevel* level, float power) {
}
static void UpdateMetrics(AecCore* aec) {
- float dtmp;
-
const float actThresholdNoisy = 8.0f;
const float actThresholdClean = 40.0f;
- const float safety = 0.99995f;
const float noisyPower = 300000.0f;
float actThreshold;
- float echo, suppressedEcho;
if (aec->echoState) { // Check if echo is likely present
aec->stateCounter++;
@@ -674,95 +702,22 @@ static void UpdateMetrics(AecCore* aec) {
(aec->farlevel.framelevel.EndOfBlock()) &&
(far_average_level > (actThreshold * aec->farlevel.minlevel))) {
+      // ERL: echo return loss.
const float near_average_level =
aec->nearlevel.averagelevel.GetLatestMean();
+ UpdateLogRatioMetric(&aec->erl, far_average_level, near_average_level);
- // Subtract noise power
- echo = near_average_level - safety * aec->nearlevel.minlevel;
-
- // ERL
- dtmp = 10 * static_cast<float>(log10(far_average_level /
- near_average_level + 1e-10f));
-
- aec->erl.instant = dtmp;
- if (dtmp > aec->erl.max) {
- aec->erl.max = dtmp;
- }
-
- if (dtmp < aec->erl.min) {
- aec->erl.min = dtmp;
- }
-
- aec->erl.counter++;
- aec->erl.sum += dtmp;
- aec->erl.average = aec->erl.sum / aec->erl.counter;
-
- // Upper mean
- if (dtmp > aec->erl.average) {
- aec->erl.hicounter++;
- aec->erl.hisum += dtmp;
- aec->erl.himean = aec->erl.hisum / aec->erl.hicounter;
- }
-
- // A_NLP
+      // A_NLP: echo return loss enhanced before the nonlinear suppression.
const float linout_average_level =
aec->linoutlevel.averagelevel.GetLatestMean();
- dtmp = 10 * static_cast<float>(log10(near_average_level /
- linout_average_level + 1e-10f));
-
- // subtract noise power
- suppressedEcho =
- linout_average_level - safety * aec->linoutlevel.minlevel;
-
- aec->aNlp.instant =
- 10 * static_cast<float>(log10(echo / suppressedEcho + 1e-10f));
-
- if (dtmp > aec->aNlp.max) {
- aec->aNlp.max = dtmp;
- }
+ UpdateLogRatioMetric(&aec->aNlp, near_average_level,
+ linout_average_level);
- if (dtmp < aec->aNlp.min) {
- aec->aNlp.min = dtmp;
- }
-
- aec->aNlp.counter++;
- aec->aNlp.sum += dtmp;
- aec->aNlp.average = aec->aNlp.sum / aec->aNlp.counter;
-
- // Upper mean
- if (dtmp > aec->aNlp.average) {
- aec->aNlp.hicounter++;
- aec->aNlp.hisum += dtmp;
- aec->aNlp.himean = aec->aNlp.hisum / aec->aNlp.hicounter;
- }
-
- // ERLE
+      // ERLE: echo return loss enhanced.
const float nlpout_average_level =
aec->nlpoutlevel.averagelevel.GetLatestMean();
- // subtract noise power
- suppressedEcho =
- nlpout_average_level - safety * aec->nlpoutlevel.minlevel;
- dtmp = 10 * static_cast<float>(log10(echo / suppressedEcho + 1e-10f));
-
- aec->erle.instant = dtmp;
- if (dtmp > aec->erle.max) {
- aec->erle.max = dtmp;
- }
-
- if (dtmp < aec->erle.min) {
- aec->erle.min = dtmp;
- }
-
- aec->erle.counter++;
- aec->erle.sum += dtmp;
- aec->erle.average = aec->erle.sum / aec->erle.counter;
-
- // Upper mean
- if (dtmp > aec->erle.average) {
- aec->erle.hicounter++;
- aec->erle.hisum += dtmp;
- aec->erle.himean = aec->erle.hisum / aec->erle.hicounter;
- }
+ UpdateLogRatioMetric(&aec->erle, near_average_level,
+ nlpout_average_level);
}
aec->stateCounter = 0;
@@ -963,9 +918,9 @@ static void RegressorPower(int num_partitions,
}
}
-static void EchoSubtraction(AecCore* aec,
- int num_partitions,
+static void EchoSubtraction(int num_partitions,
int extended_filter_enabled,
+ int* extreme_filter_divergence,
float filter_step_size,
float error_threshold,
float* x_fft,
@@ -1001,9 +956,10 @@ static void EchoSubtraction(AecCore* aec,
// Conditionally reset the echo subtraction filter if the filter has diverged
// significantly.
- if (!aec->extended_filter_enabled && aec->extreme_filter_divergence) {
- memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
- aec->extreme_filter_divergence = 0;
+ if (!extended_filter_enabled && *extreme_filter_divergence) {
+ memset(h_fft_buf, 0,
+ 2 * kExtendedNumPartitions * PART_LEN1 * sizeof(h_fft_buf[0][0]));
+ *extreme_filter_divergence = 0;
}
// Produce echo estimate s_fft.
@@ -1024,9 +980,6 @@ static void EchoSubtraction(AecCore* aec,
memcpy(e_extended + PART_LEN, e, sizeof(float) * PART_LEN);
Fft(e_extended, e_fft);
- RTC_AEC_DEBUG_RAW_WRITE(aec->e_fft_file, &e_fft[0][0],
- sizeof(e_fft[0][0]) * PART_LEN1 * 2);
-
// Scale error signal inversely with far power.
WebRtcAec_ScaleErrorSignal(filter_step_size, error_threshold, x_pow, e_fft);
WebRtcAec_FilterAdaptation(num_partitions, *x_fft_buf_block_pos, x_fft_buf,
@@ -1034,92 +987,30 @@ static void EchoSubtraction(AecCore* aec,
memcpy(echo_subtractor_output, e, sizeof(float) * PART_LEN);
}
-static void EchoSuppression(AecCore* aec,
- float farend[PART_LEN2],
- float* echo_subtractor_output,
- float* output,
- float* const* outputH) {
- float efw[2][PART_LEN1];
- float xfw[2][PART_LEN1];
- float dfw[2][PART_LEN1];
- float comfortNoiseHband[2][PART_LEN1];
- float fft[PART_LEN2];
- float nlpGainHband;
- int i;
- size_t j;
-
- // Coherence and non-linear filter
- float cohde[PART_LEN1], cohxd[PART_LEN1];
+static void FormSuppressionGain(AecCore* aec,
+ float cohde[PART_LEN1],
+ float cohxd[PART_LEN1],
+ float hNl[PART_LEN1]) {
float hNlDeAvg, hNlXdAvg;
- float hNl[PART_LEN1];
float hNlPref[kPrefBandSize];
float hNlFb = 0, hNlFbLow = 0;
- const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
const int prefBandSize = kPrefBandSize / aec->mult;
+ const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
const int minPrefBand = 4 / aec->mult;
// Power estimate smoothing coefficients.
const float* min_overdrive = aec->extended_filter_enabled
? kExtendedMinOverDrive
: kNormalMinOverDrive;
- // Filter energy
- const int delayEstInterval = 10 * aec->mult;
-
- float* xfw_ptr = NULL;
-
- // Update eBuf with echo subtractor output.
- memcpy(aec->eBuf + PART_LEN, echo_subtractor_output,
- sizeof(float) * PART_LEN);
-
- // Analysis filter banks for the echo suppressor.
- // Windowed near-end ffts.
- WindowData(fft, aec->dBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, dfw);
-
- // Windowed echo suppressor output ffts.
- WindowData(fft, aec->eBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, efw);
-
- // NLP
-
- // Convert far-end partition to the frequency domain with windowing.
- WindowData(fft, farend);
- Fft(fft, xfw);
- xfw_ptr = &xfw[0][0];
-
- // Buffer far.
- memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);
-
- aec->delayEstCtr++;
- if (aec->delayEstCtr == delayEstInterval) {
- aec->delayEstCtr = 0;
- aec->delayIdx = WebRtcAec_PartitionDelay(aec);
- }
-
- // Use delayed far.
- memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1,
- sizeof(xfw[0][0]) * 2 * PART_LEN1);
-
- WebRtcAec_SubbandCoherence(aec, efw, dfw, xfw, fft, cohde, cohxd,
- &aec->extreme_filter_divergence);
-
- // Select the microphone signal as output if the filter is deemed to have
- // diverged.
- if (aec->divergeState) {
- memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
- }
-
hNlXdAvg = 0;
- for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
+ for (int i = minPrefBand; i < prefBandSize + minPrefBand; ++i) {
hNlXdAvg += cohxd[i];
}
hNlXdAvg /= prefBandSize;
hNlXdAvg = 1 - hNlXdAvg;
hNlDeAvg = 0;
- for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
+ for (int i = minPrefBand; i < prefBandSize + minPrefBand; ++i) {
hNlDeAvg += cohde[i];
}
hNlDeAvg /= prefBandSize;
@@ -1139,11 +1030,11 @@ static void EchoSuppression(AecCore* aec,
aec->overDrive = min_overdrive[aec->nlp_mode];
if (aec->stNearState == 1) {
- memcpy(hNl, cohde, sizeof(hNl));
+ memcpy(hNl, cohde, sizeof(hNl[0]) * PART_LEN1);
hNlFb = hNlDeAvg;
hNlFbLow = hNlDeAvg;
} else {
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
hNl[i] = 1 - cohxd[i];
}
hNlFb = hNlXdAvg;
@@ -1152,12 +1043,12 @@ static void EchoSuppression(AecCore* aec,
} else {
if (aec->stNearState == 1) {
aec->echoState = 0;
- memcpy(hNl, cohde, sizeof(hNl));
+ memcpy(hNl, cohde, sizeof(hNl[0]) * PART_LEN1);
hNlFb = hNlDeAvg;
hNlFbLow = hNlDeAvg;
} else {
aec->echoState = 1;
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
hNl[i] = WEBRTC_SPL_MIN(cohde[i], 1 - cohxd[i]);
}
@@ -1197,16 +1088,96 @@ static void EchoSuppression(AecCore* aec,
}
// Smooth the overdrive.
- if (aec->overDrive < aec->overDriveSm) {
- aec->overDriveSm = 0.99f * aec->overDriveSm + 0.01f * aec->overDrive;
+ if (aec->overDrive < aec->overdrive_scaling) {
+ aec->overdrive_scaling =
+ 0.99f * aec->overdrive_scaling + 0.01f * aec->overDrive;
} else {
- aec->overDriveSm = 0.9f * aec->overDriveSm + 0.1f * aec->overDrive;
+ aec->overdrive_scaling =
+ 0.9f * aec->overdrive_scaling + 0.1f * aec->overDrive;
+ }
+
+ // Apply the overdrive.
+ WebRtcAec_Overdrive(aec->overdrive_scaling, hNlFb, hNl);
+}
+
+static void EchoSuppression(AecCore* aec,
+ float farend[PART_LEN2],
+ float* echo_subtractor_output,
+ float* output,
+ float* const* outputH) {
+ float efw[2][PART_LEN1];
+ float xfw[2][PART_LEN1];
+ float dfw[2][PART_LEN1];
+ float comfortNoiseHband[2][PART_LEN1];
+ float fft[PART_LEN2];
+ float nlpGainHband;
+ int i;
+ size_t j;
+
+ // Coherence and non-linear filter
+ float cohde[PART_LEN1], cohxd[PART_LEN1];
+ float hNl[PART_LEN1];
+
+ // Filter energy
+ const int delayEstInterval = 10 * aec->mult;
+
+ float* xfw_ptr = NULL;
+
+ // Update eBuf with echo subtractor output.
+ memcpy(aec->eBuf + PART_LEN, echo_subtractor_output,
+ sizeof(float) * PART_LEN);
+
+ // Analysis filter banks for the echo suppressor.
+ // Windowed near-end ffts.
+ WindowData(fft, aec->dBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, dfw);
+
+ // Windowed echo suppressor output ffts.
+ WindowData(fft, aec->eBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, efw);
+
+ // NLP
+
+ // Convert far-end partition to the frequency domain with windowing.
+ WindowData(fft, farend);
+ Fft(fft, xfw);
+ xfw_ptr = &xfw[0][0];
+
+ // Buffer far.
+ memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);
+
+ aec->delayEstCtr++;
+ if (aec->delayEstCtr == delayEstInterval) {
+ aec->delayEstCtr = 0;
+ aec->delayIdx = WebRtcAec_PartitionDelay(aec->num_partitions, aec->wfBuf);
+ }
+
+ // Use delayed far.
+ memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1,
+ sizeof(xfw[0][0]) * 2 * PART_LEN1);
+
+ WebRtcAec_UpdateCoherenceSpectra(aec->mult, aec->extended_filter_enabled == 1,
+ efw, dfw, xfw, &aec->coherence_state,
+ &aec->divergeState,
+ &aec->extreme_filter_divergence);
+
+ WebRtcAec_ComputeCoherence(&aec->coherence_state, cohde, cohxd);
+
+ // Select the microphone signal as output if the filter is deemed to have
+ // diverged.
+ if (aec->divergeState) {
+ memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
}
- WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw);
+ FormSuppressionGain(aec, cohde, cohxd, hNl);
+
+ WebRtcAec_Suppress(hNl, efw);
// Add comfort noise.
- WebRtcAec_ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
+ ComfortNoise(aec->num_bands > 1, &aec->seed, efw, comfortNoiseHband,
+ aec->noisePow, hNl);
// Inverse error fft.
ScaledInverseFft(efw, fft, 2.0f, 1);
@@ -1315,15 +1286,10 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_ReadBuffer(aec->far_time_buf, reinterpret_cast<void**>(&farend_ptr),
farend, 1);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- // TODO(minyue): |farend_ptr| starts from buffered samples. This will be
- // modified when |aec->far_time_buf| is revised.
- RTC_AEC_DEBUG_WAV_WRITE(aec->farFile, &farend_ptr[PART_LEN], PART_LEN);
-
- RTC_AEC_DEBUG_WAV_WRITE(aec->nearFile, nearend_ptr, PART_LEN);
- }
-#endif
+ aec->data_dumper->DumpWav("aec_far", PART_LEN, &farend_ptr[PART_LEN],
+ std::min(aec->sampFreq, 16000), 1);
+ aec->data_dumper->DumpWav("aec_near", PART_LEN, nearend_ptr,
+ std::min(aec->sampFreq, 16000), 1);
if (aec->metricsMode == 1) {
// Update power levels
@@ -1417,12 +1383,14 @@ static void ProcessBlock(AecCore* aec) {
}
// Perform echo subtraction.
- EchoSubtraction(aec, aec->num_partitions, aec->extended_filter_enabled,
- aec->filter_step_size, aec->error_threshold, &x_fft[0][0],
- &aec->xfBufBlockPos, aec->xfBuf, nearend_ptr, aec->xPow,
- aec->wfBuf, echo_subtractor_output);
+ EchoSubtraction(aec->num_partitions, aec->extended_filter_enabled,
+ &aec->extreme_filter_divergence, aec->filter_step_size,
+ aec->error_threshold, &x_fft[0][0], &aec->xfBufBlockPos,
+ aec->xfBuf, nearend_ptr, aec->xPow, aec->wfBuf,
+ echo_subtractor_output);
- RTC_AEC_DEBUG_WAV_WRITE(aec->outLinearFile, echo_subtractor_output, PART_LEN);
+ aec->data_dumper->DumpWav("aec_out_linear", PART_LEN, echo_subtractor_output,
+ std::min(aec->sampFreq, 16000), 1);
if (aec->metricsMode == 1) {
UpdateLevel(&aec->linoutlevel,
@@ -1444,12 +1412,14 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_WriteBuffer(aec->outFrBufH[i], outputH[i], PART_LEN);
}
- RTC_AEC_DEBUG_WAV_WRITE(aec->outFile, output, PART_LEN);
+ aec->data_dumper->DumpWav("aec_out", PART_LEN, output,
+ std::min(aec->sampFreq, 16000), 1);
}
-AecCore* WebRtcAec_CreateAec() {
+AecCore* WebRtcAec_CreateAec(int instance_count) {
int i;
- AecCore* aec = new AecCore;
+ AecCore* aec = new AecCore(instance_count);
+
if (!aec) {
return NULL;
}
@@ -1493,12 +1463,6 @@ AecCore* WebRtcAec_CreateAec() {
return NULL;
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- aec->instance_index = webrtc_aec_instance_count;
-
- aec->farFile = aec->nearFile = aec->outFile = aec->outLinearFile = NULL;
- aec->debug_dump_count = 0;
-#endif
aec->delay_estimator_farend =
WebRtc_CreateDelayEstimatorFarend(PART_LEN1, kHistorySizeBlocks);
if (aec->delay_estimator_farend == NULL) {
@@ -1530,9 +1494,10 @@ AecCore* WebRtcAec_CreateAec() {
WebRtcAec_FilterFar = FilterFar;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
WebRtcAec_FilterAdaptation = FilterAdaptation;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
- WebRtcAec_ComfortNoise = ComfortNoise;
- WebRtcAec_SubbandCoherence = SubbandCoherence;
+ WebRtcAec_Overdrive = Overdrive;
+ WebRtcAec_Suppress = Suppress;
+ WebRtcAec_ComputeCoherence = ComputeCoherence;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectra;
WebRtcAec_StoreAsComplex = StoreAsComplex;
WebRtcAec_PartitionDelay = PartitionDelay;
WebRtcAec_WindowData = WindowData;
@@ -1549,10 +1514,6 @@ AecCore* WebRtcAec_CreateAec() {
#if defined(WEBRTC_HAS_NEON)
WebRtcAec_InitAec_neon();
-#elif defined(WEBRTC_DETECT_NEON)
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- WebRtcAec_InitAec_neon();
- }
#endif
aec_rdft_init();
@@ -1576,12 +1537,6 @@ void WebRtcAec_FreeAec(AecCore* aec) {
WebRtc_FreeBuffer(aec->far_time_buf);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->farFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->nearFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->outFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->outLinearFile);
- RTC_AEC_DEBUG_RAW_CLOSE(aec->e_fft_file);
-
WebRtc_FreeDelayEstimator(aec->delay_estimator);
WebRtc_FreeDelayEstimatorFarend(aec->delay_estimator_farend);
@@ -1626,6 +1581,7 @@ static void SetErrorThreshold(AecCore* aec) {
int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
int i;
+ aec->data_dumper->InitiateNewSetOfRecordings();
aec->sampFreq = sampFreq;
@@ -1648,27 +1604,6 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
// Initialize far-end buffers.
WebRtc_InitBuffer(aec->far_time_buf);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- int process_rate = sampFreq > 16000 ? 16000 : sampFreq;
- RTC_AEC_DEBUG_WAV_REOPEN("aec_far", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->farFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_near", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->nearFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_out", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->outFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_out_linear", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->outLinearFile);
- }
-
- RTC_AEC_DEBUG_RAW_OPEN("aec_e_fft", aec->debug_dump_count, &aec->e_fft_file);
-
- ++aec->debug_dump_count;
-#endif
aec->system_delay = 0;
if (WebRtc_InitDelayEstimatorFarend(aec->delay_estimator_farend) != 0) {
@@ -1749,18 +1684,18 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
// doesn't change the output at all and yields 0.4% overall speedup.
memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
- memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1);
- memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1);
+ memset(aec->coherence_state.sde, 0, sizeof(complex_t) * PART_LEN1);
+ memset(aec->coherence_state.sxd, 0, sizeof(complex_t) * PART_LEN1);
memset(aec->xfwBuf, 0,
sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
- memset(aec->se, 0, sizeof(float) * PART_LEN1);
+ memset(aec->coherence_state.se, 0, sizeof(float) * PART_LEN1);
// To prevent numerical instability in the first block.
for (i = 0; i < PART_LEN1; i++) {
- aec->sd[i] = 1;
+ aec->coherence_state.sd[i] = 1;
}
for (i = 0; i < PART_LEN1; i++) {
- aec->sx[i] = 1;
+ aec->coherence_state.sx[i] = 1;
}
memset(aec->hNs, 0, sizeof(aec->hNs));
@@ -1772,7 +1707,7 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
aec->hNlNewMin = 0;
aec->hNlMinCtr = 0;
aec->overDrive = 2;
- aec->overDriveSm = 2;
+ aec->overdrive_scaling = 2;
aec->delayIdx = 0;
aec->stNearState = 0;
aec->echoState = 0;
@@ -1878,11 +1813,15 @@ void WebRtcAec_ProcessFrames(AecCore* aec,
// rounding, like -16.
int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN;
int moved_elements = WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+ MaybeLogDelayAdjustment(moved_elements * (aec->sampFreq == 8000 ? 8 : 4),
+ DelaySource::kSystemDelay);
aec->knownDelay -= moved_elements * PART_LEN;
} else {
// 2 b) Apply signal based delay correction.
int move_elements = SignalBasedDelayCorrection(aec);
int moved_elements = WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+ MaybeLogDelayAdjustment(moved_elements * (aec->sampFreq == 8000 ? 8 : 4),
+ DelaySource::kDelayAgnostic);
int far_near_buffer_diff =
WebRtc_available_read(aec->far_time_buf) -
WebRtc_available_read(aec->nearFrBuf) / PART_LEN;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
index bd5b283eca0..1ab20201860 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
@@ -17,6 +17,15 @@
#include <stddef.h>
+#include <memory>
+
+extern "C" {
+#include "webrtc/common_audio/ring_buffer.h"
+}
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_audio/wav_file.h"
+#include "webrtc/modules/audio_processing/aec/aec_common.h"
+#include "webrtc/modules/audio_processing/utility/block_mean_calculator.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -27,6 +36,8 @@ namespace webrtc {
#define PART_LEN2 (PART_LEN * 2) // Length of partition * 2
#define NUM_HIGH_BANDS_MAX 2 // Max number of high bands
+class ApmDataDumper;
+
typedef float complex_t[2];
// For performance reasons, some arrays of complex numbers are replaced by twice
// as long arrays of float, all the real parts followed by all the imaginary
@@ -47,20 +58,188 @@ typedef struct Stats {
float sum;
float hisum;
float himean;
- int counter;
- int hicounter;
+ size_t counter;
+ size_t hicounter;
} Stats;
-typedef struct AecCore AecCore;
+// Number of partitions for the extended filter mode. The first one is an enum
+// to be used in array declarations, as it represents the maximum filter length.
+enum { kExtendedNumPartitions = 32 };
+static const int kNormalNumPartitions = 12;
+
+// Delay estimator constants, used for logging and delay compensation if
+// reported delays are disabled.
+enum { kLookaheadBlocks = 15 };
+enum {
+ // 500 ms for 16 kHz which is equivalent with the limit of reported delays.
+ kHistorySizeBlocks = 125
+};
+
+typedef struct PowerLevel {
+ PowerLevel();
+
+ BlockMeanCalculator framelevel;
+ BlockMeanCalculator averagelevel;
+ float minlevel;
+} PowerLevel;
+
+class DivergentFilterFraction {
+ public:
+ DivergentFilterFraction();
+
+ // Reset.
+ void Reset();
+
+ void AddObservation(const PowerLevel& nearlevel,
+ const PowerLevel& linoutlevel,
+ const PowerLevel& nlpoutlevel);
+
+ // Return the latest fraction.
+ float GetLatestFraction() const;
+
+ private:
+ // Clear all values added.
+ void Clear();
+
+ size_t count_;
+ size_t occurrence_;
+ float fraction_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(DivergentFilterFraction);
+};
+
+typedef struct CoherenceState {
+ complex_t sde[PART_LEN1]; // cross-psd of nearend and error
+ complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
+ float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
+} CoherenceState;
+
+struct AecCore {
+ explicit AecCore(int instance_index);
+ ~AecCore();
+
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ CoherenceState coherence_state;
+
+ int farBufWritePos, farBufReadPos;
+
+ int knownDelay;
+ int inSamples, outSamples;
+ int delayEstCtr;
+
+ RingBuffer* nearFrBuf;
+ RingBuffer* outFrBuf;
+
+ RingBuffer* nearFrBufH[NUM_HIGH_BANDS_MAX];
+ RingBuffer* outFrBufH[NUM_HIGH_BANDS_MAX];
-AecCore* WebRtcAec_CreateAec(); // Returns NULL on error.
+ float dBuf[PART_LEN2]; // nearend
+ float eBuf[PART_LEN2]; // error
+
+ float dBufH[NUM_HIGH_BANDS_MAX][PART_LEN2]; // nearend
+
+ float xPow[PART_LEN1];
+ float dPow[PART_LEN1];
+ float dMinPow[PART_LEN1];
+ float dInitMinPow[PART_LEN1];
+ float* noisePow;
+
+ float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer
+ float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft
+ // Farend windowed fft buffer.
+ complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1];
+
+ float hNs[PART_LEN1];
+ float hNlFbMin, hNlFbLocalMin;
+ float hNlXdAvgMin;
+ int hNlNewMin, hNlMinCtr;
+ float overDrive;
+ float overdrive_scaling;
+ int nlp_mode;
+ float outBuf[PART_LEN];
+ int delayIdx;
+
+ short stNearState, echoState;
+ short divergeState;
+
+ int xfBufBlockPos;
+
+ RingBuffer* far_time_buf;
+
+ int system_delay; // Current system delay buffered in AEC.
+
+ int mult; // sampling frequency multiple
+  int sampFreq;
+ size_t num_bands;
+ uint32_t seed;
+
+ float filter_step_size; // stepsize
+ float error_threshold; // error threshold
+
+ int noiseEstCtr;
+
+ PowerLevel farlevel;
+ PowerLevel nearlevel;
+ PowerLevel linoutlevel;
+ PowerLevel nlpoutlevel;
+
+ int metricsMode;
+ int stateCounter;
+ Stats erl;
+ Stats erle;
+ Stats aNlp;
+ Stats rerl;
+ DivergentFilterFraction divergent_filter_fraction;
+
+ // Quantities to control H band scaling for SWB input
+ int freq_avg_ic; // initial bin for averaging nlp gain
+ int flag_Hband_cn; // for comfort noise
+ float cn_scale_Hband; // scale for comfort noise in H band
+
+ int delay_metrics_delivered;
+ int delay_histogram[kHistorySizeBlocks];
+ int num_delay_values;
+ int delay_median;
+ int delay_std;
+ float fraction_poor_delays;
+ int delay_logging_enabled;
+ void* delay_estimator_farend;
+ void* delay_estimator;
+ // Variables associated with delay correction through signal based delay
+ // estimation feedback.
+ int signal_delay_correction;
+ int previous_delay;
+ int delay_correction_count;
+ int shift_offset;
+ float delay_quality_threshold;
+ int frame_count;
+
+ // 0 = delay agnostic mode (signal based delay correction) disabled.
+ // Otherwise enabled.
+ int delay_agnostic_enabled;
+ // 1 = extended filter mode enabled, 0 = disabled.
+ int extended_filter_enabled;
+ // 1 = next generation aec mode enabled, 0 = disabled.
+ int aec3_enabled;
+ bool refined_adaptive_filter_enabled;
+
+ // Runtime selection of number of filter partitions.
+ int num_partitions;
+
+ // Flag that extreme filter divergence has been detected by the Echo
+ // Suppressor.
+ int extreme_filter_divergence;
+};
+
+AecCore* WebRtcAec_CreateAec(int instance_count); // Returns NULL on error.
void WebRtcAec_FreeAec(AecCore* aec);
int WebRtcAec_InitAec(AecCore* aec, int sampFreq);
void WebRtcAec_InitAec_SSE2(void);
#if defined(MIPS_FPU_LE)
void WebRtcAec_InitAec_mips(void);
#endif
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcAec_InitAec_neon(void);
#endif
@@ -97,9 +276,6 @@ void WebRtcAec_GetEchoStats(AecCore* self,
Stats* erle,
Stats* a_nlp,
float* divergent_filter_fraction);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-void* WebRtcAec_far_time_buf(AecCore* self);
-#endif
// Sets local configuration modes.
void WebRtcAec_SetConfigCore(AecCore* self,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
index 1f7b6b541fa..d4fad9e5e63 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
@@ -11,12 +11,16 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
+#include <memory>
+
extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
}
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/aec/aec_common.h"
#include "webrtc/modules/audio_processing/aec/aec_core.h"
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/modules/audio_processing/utility/block_mean_calculator.h"
#include "webrtc/typedefs.h"
@@ -68,8 +72,19 @@ class DivergentFilterFraction {
RTC_DISALLOW_COPY_AND_ASSIGN(DivergentFilterFraction);
};
+typedef struct CoherenceState {
+ complex_t sde[PART_LEN1]; // cross-psd of nearend and error
+ complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
+ float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
+} CoherenceState;
+
struct AecCore {
- AecCore();
+ explicit AecCore(int instance_index);
+ ~AecCore();
+
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ CoherenceState coherence_state;
int farBufWritePos, farBufReadPos;
@@ -96,17 +111,15 @@ struct AecCore {
float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer
float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft
- complex_t sde[PART_LEN1]; // cross-psd of nearend and error
- complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
// Farend windowed fft buffer.
complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1];
- float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
float hNs[PART_LEN1];
float hNlFbMin, hNlFbLocalMin;
float hNlXdAvgMin;
int hNlNewMin, hNlMinCtr;
- float overDrive, overDriveSm;
+ float overDrive;
+ float overdrive_scaling;
int nlp_mode;
float outBuf[PART_LEN];
int delayIdx;
@@ -181,22 +194,6 @@ struct AecCore {
// Flag that extreme filter divergence has been detected by the Echo
// Suppressor.
int extreme_filter_divergence;
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- // Sequence number of this AEC instance, so that different instances can
- // choose different dump file names.
- int instance_index;
-
- // Number of times we've restarted dumping; used to pick new dump file names
- // each time.
- int debug_dump_count;
-
- rtc_WavWriter* farFile;
- rtc_WavWriter* nearFile;
- rtc_WavWriter* outFile;
- rtc_WavWriter* outLinearFile;
- FILE* e_fft_file;
-#endif
};
typedef void (*WebRtcAecFilterFar)(
@@ -218,30 +215,34 @@ typedef void (*WebRtcAecFilterAdaptation)(
float e_fft[2][PART_LEN1],
float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
extern WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
-typedef void (*WebRtcAecOverdriveAndSuppress)(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]);
-extern WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
-
-typedef void (*WebRtcAecComfortNoise)(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda);
-extern WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
-
-typedef void (*WebRtcAecSubBandCoherence)(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
- float* cohde,
- float* cohxd,
- int* extreme_filter_divergence);
-extern WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
-typedef int (*WebRtcAecPartitionDelay)(const AecCore* aec);
+typedef void (*WebRtcAecOverdrive)(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]);
+extern WebRtcAecOverdrive WebRtcAec_Overdrive;
+
+typedef void (*WebRtcAecSuppress)(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]);
+extern WebRtcAecSuppress WebRtcAec_Suppress;
+
+typedef void (*WebRtcAecComputeCoherence)(const CoherenceState* coherence_state,
+ float* cohde,
+ float* cohxd);
+extern WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+
+typedef void (*WebRtcAecUpdateCoherenceSpectra)(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence);
+extern WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
+
+typedef int (*WebRtcAecPartitionDelay)(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
extern WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
typedef void (*WebRtcAecStoreAsComplex)(const float* data,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
index 5c6d8ebb73a..a9b5cd4e60f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
@@ -19,314 +19,14 @@
extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
extern const float WebRtcAec_weightCurve[65];
extern const float WebRtcAec_overDriveCurve[65];
-void WebRtcAec_ComfortNoise_mips(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda) {
- int i, num;
- float rand[PART_LEN];
- float noise, noiseAvg, tmp, tmpAvg;
- int16_t randW16[PART_LEN];
- complex_t u[PART_LEN1];
-
- const float pi2 = 6.28318530717959f;
- const float pi2t = pi2 / 32768;
-
- // Generate a uniform random array on [0 1]
- WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
-
- int16_t* randWptr = randW16;
- float randTemp, randTemp2, randTemp3, randTemp4;
- int32_t tmp1s, tmp2s, tmp3s, tmp4s;
-
- for (i = 0; i < PART_LEN; i += 4) {
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "lh %[tmp1s], 0(%[randWptr]) \n\t"
- "lh %[tmp2s], 2(%[randWptr]) \n\t"
- "lh %[tmp3s], 4(%[randWptr]) \n\t"
- "lh %[tmp4s], 6(%[randWptr]) \n\t"
- "mtc1 %[tmp1s], %[randTemp] \n\t"
- "mtc1 %[tmp2s], %[randTemp2] \n\t"
- "mtc1 %[tmp3s], %[randTemp3] \n\t"
- "mtc1 %[tmp4s], %[randTemp4] \n\t"
- "cvt.s.w %[randTemp], %[randTemp] \n\t"
- "cvt.s.w %[randTemp2], %[randTemp2] \n\t"
- "cvt.s.w %[randTemp3], %[randTemp3] \n\t"
- "cvt.s.w %[randTemp4], %[randTemp4] \n\t"
- "addiu %[randWptr], %[randWptr], 8 \n\t"
- "mul.s %[randTemp], %[randTemp], %[pi2t] \n\t"
- "mul.s %[randTemp2], %[randTemp2], %[pi2t] \n\t"
- "mul.s %[randTemp3], %[randTemp3], %[pi2t] \n\t"
- "mul.s %[randTemp4], %[randTemp4], %[pi2t] \n\t"
- ".set pop \n\t"
- : [randWptr] "+r" (randWptr), [randTemp] "=&f" (randTemp),
- [randTemp2] "=&f" (randTemp2), [randTemp3] "=&f" (randTemp3),
- [randTemp4] "=&f" (randTemp4), [tmp1s] "=&r" (tmp1s),
- [tmp2s] "=&r" (tmp2s), [tmp3s] "=&r" (tmp3s),
- [tmp4s] "=&r" (tmp4s)
- : [pi2t] "f" (pi2t)
- : "memory");
-
- u[i + 1][0] = cosf(randTemp);
- u[i + 1][1] = sinf(randTemp);
- u[i + 2][0] = cosf(randTemp2);
- u[i + 2][1] = sinf(randTemp2);
- u[i + 3][0] = cosf(randTemp3);
- u[i + 3][1] = sinf(randTemp3);
- u[i + 4][0] = cosf(randTemp4);
- u[i + 4][1] = sinf(randTemp4);
- }
-
- // Reject LF noise
- float* u_ptr = &u[1][0];
- float noise2, noise3, noise4;
- float tmp1f, tmp2f, tmp3f, tmp4f, tmp5f, tmp6f, tmp7f, tmp8f;
-
- u[0][0] = 0;
- u[0][1] = 0;
- for (i = 1; i < PART_LEN1; i += 4) {
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "lwc1 %[noise], 4(%[noisePow]) \n\t"
- "lwc1 %[noise2], 8(%[noisePow]) \n\t"
- "lwc1 %[noise3], 12(%[noisePow]) \n\t"
- "lwc1 %[noise4], 16(%[noisePow]) \n\t"
- "sqrt.s %[noise], %[noise] \n\t"
- "sqrt.s %[noise2], %[noise2] \n\t"
- "sqrt.s %[noise3], %[noise3] \n\t"
- "sqrt.s %[noise4], %[noise4] \n\t"
- "lwc1 %[tmp1f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp2f], 4(%[u_ptr]) \n\t"
- "lwc1 %[tmp3f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 12(%[u_ptr]) \n\t"
- "lwc1 %[tmp5f], 16(%[u_ptr]) \n\t"
- "lwc1 %[tmp6f], 20(%[u_ptr]) \n\t"
- "lwc1 %[tmp7f], 24(%[u_ptr]) \n\t"
- "lwc1 %[tmp8f], 28(%[u_ptr]) \n\t"
- "addiu %[noisePow], %[noisePow], 16 \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[noise] \n\t"
- "mul.s %[tmp2f], %[tmp2f], %[noise] \n\t"
- "mul.s %[tmp3f], %[tmp3f], %[noise2] \n\t"
- "mul.s %[tmp4f], %[tmp4f], %[noise2] \n\t"
- "mul.s %[tmp5f], %[tmp5f], %[noise3] \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[noise3] \n\t"
- "swc1 %[tmp1f], 0(%[u_ptr]) \n\t"
- "swc1 %[tmp3f], 8(%[u_ptr]) \n\t"
- "mul.s %[tmp8f], %[tmp8f], %[noise4] \n\t"
- "mul.s %[tmp7f], %[tmp7f], %[noise4] \n\t"
- "neg.s %[tmp2f] \n\t"
- "neg.s %[tmp4f] \n\t"
- "neg.s %[tmp6f] \n\t"
- "neg.s %[tmp8f] \n\t"
- "swc1 %[tmp5f], 16(%[u_ptr]) \n\t"
- "swc1 %[tmp7f], 24(%[u_ptr]) \n\t"
- "swc1 %[tmp2f], 4(%[u_ptr]) \n\t"
- "swc1 %[tmp4f], 12(%[u_ptr]) \n\t"
- "swc1 %[tmp6f], 20(%[u_ptr]) \n\t"
- "swc1 %[tmp8f], 28(%[u_ptr]) \n\t"
- "addiu %[u_ptr], %[u_ptr], 32 \n\t"
- ".set pop \n\t"
- : [u_ptr] "+r" (u_ptr), [noisePow] "+r" (noisePow),
- [noise] "=&f" (noise), [noise2] "=&f" (noise2),
- [noise3] "=&f" (noise3), [noise4] "=&f" (noise4),
- [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f),
- [tmp3f] "=&f" (tmp3f), [tmp4f] "=&f" (tmp4f),
- [tmp5f] "=&f" (tmp5f), [tmp6f] "=&f" (tmp6f),
- [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f)
- :
- : "memory");
- }
- u[PART_LEN][1] = 0;
- noisePow -= PART_LEN;
-
- u_ptr = &u[0][0];
- float* u_ptr_end = &u[PART_LEN][0];
- float* efw_ptr_0 = &efw[0][0];
- float* efw_ptr_1 = &efw[1][0];
- float tmp9f, tmp10f;
- const float tmp1c = 1.0;
-
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "1: \n\t"
- "lwc1 %[tmp1f], 0(%[lambda]) \n\t"
- "lwc1 %[tmp6f], 4(%[lambda]) \n\t"
- "addiu %[lambda], %[lambda], 8 \n\t"
- "c.lt.s %[tmp1f], %[tmp1c] \n\t"
- "bc1f 4f \n\t"
- " nop \n\t"
- "c.lt.s %[tmp6f], %[tmp1c] \n\t"
- "bc1f 3f \n\t"
- " nop \n\t"
- "2: \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
- "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
- "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
- "sqrt.s %[tmp1f], %[tmp1f] \n\t"
- "sqrt.s %[tmp6f], %[tmp6f] \n\t"
- "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
- "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
- "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
- "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
- "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
- "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
- "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
- "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
- "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "b 5f \n\t"
- " swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "3: \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
- "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
- "sqrt.s %[tmp1f], %[tmp1f] \n\t"
- "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
- "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
- "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
- "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "b 5f \n\t"
- " swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "4: \n\t"
- "c.lt.s %[tmp6f], %[tmp1c] \n\t"
- "bc1f 5f \n\t"
- " nop \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
- "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
- "sqrt.s %[tmp6f], %[tmp6f] \n\t"
- "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
- "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
- "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
- "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "5: \n\t"
- "addiu %[u_ptr], %[u_ptr], 16 \n\t"
- "addiu %[efw_ptr_0], %[efw_ptr_0], 8 \n\t"
- "bne %[u_ptr], %[u_ptr_end], 1b \n\t"
- " addiu %[efw_ptr_1], %[efw_ptr_1], 8 \n\t"
- ".set pop \n\t"
- : [lambda] "+r" (lambda), [u_ptr] "+r" (u_ptr),
- [efw_ptr_0] "+r" (efw_ptr_0), [efw_ptr_1] "+r" (efw_ptr_1),
- [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f), [tmp3f] "=&f" (tmp3f),
- [tmp4f] "=&f" (tmp4f), [tmp5f] "=&f" (tmp5f),
- [tmp6f] "=&f" (tmp6f), [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f),
- [tmp9f] "=&f" (tmp9f), [tmp10f] "=&f" (tmp10f)
- : [tmp1c] "f" (tmp1c), [u_ptr_end] "r" (u_ptr_end)
- : "memory");
-
- lambda -= PART_LEN;
- tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[PART_LEN] * lambda[PART_LEN], 0));
- // tmp = 1 - lambda[i];
- efw[0][PART_LEN] += tmp * u[PART_LEN][0];
- efw[1][PART_LEN] += tmp * u[PART_LEN][1];
-
- // For H band comfort noise
- // TODO(peah): don't compute noise and "tmp" twice. Use the previous results.
- noiseAvg = 0.0;
- tmpAvg = 0.0;
- num = 0;
- if (aec->num_bands > 1) {
- for (i = 0; i < PART_LEN; i++) {
- rand[i] = (static_cast<float>(randW16[i])) / 32768;
- }
-
- // average noise scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- noiseAvg += sqrtf(noisePow[i]);
- }
- noiseAvg /= static_cast<float>(num);
-
- // average nlp scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- num = 0;
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- }
- tmpAvg /= static_cast<float>(num);
-
- // Use average noise for H band
- // TODO(peah): we should probably have a new random vector here.
- // Reject LF noise
- u[0][0] = 0;
- u[0][1] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- // Use average noise for H band
- u[i][0] = noiseAvg * static_cast<float>(cos(tmp));
- u[i][1] = -noiseAvg * static_cast<float>(sin(tmp));
- }
- u[PART_LEN][1] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // Use average NLP weight for H band
- comfortNoiseHband[0][i] = tmpAvg * u[i][0];
- comfortNoiseHband[1][i] = tmpAvg * u[i][1];
- }
- } else {
- memset(comfortNoiseHband, 0,
- 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
- }
-}
-
void WebRtcAec_FilterFar_mips(
int num_partitions,
int x_fft_buf_block_pos,
@@ -644,24 +344,18 @@ void WebRtcAec_FilterAdaptation_mips(
}
}
-void WebRtcAec_OverdriveAndSuppress_mips(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
- int i;
+void WebRtcAec_Overdrive_mips(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
const float one = 1.0;
float* p_hNl;
- float* p_efw0;
- float* p_efw1;
const float* p_WebRtcAec_wC;
float temp1, temp2, temp3, temp4;
p_hNl = &hNl[0];
- p_efw0 = &efw[0][0];
- p_efw1 = &efw[1][0];
p_WebRtcAec_wC = &WebRtcAec_weightCurve[0];
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Weight subbands
__asm __volatile(
".set push \n\t"
@@ -687,8 +381,22 @@ void WebRtcAec_OverdriveAndSuppress_mips(AecCore* aec,
: [hNlFb] "f" (hNlFb), [one] "f" (one), [p_hNl] "r" (p_hNl)
: "memory");
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+
+void WebRtcAec_Suppress_mips(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]) {
+ const float* p_hNl;
+ float* p_efw0;
+ float* p_efw1;
+ float temp1, temp2, temp3, temp4;
+
+ p_hNl = &hNl[0];
+ p_efw0 = &efw[0][0];
+ p_efw1 = &efw[1][0];
+ for (int i = 0; i < PART_LEN1; ++i) {
__asm __volatile(
"lwc1 %[temp1], 0(%[p_hNl]) \n\t"
"lwc1 %[temp3], 0(%[p_efw1]) \n\t"
@@ -775,7 +483,7 @@ void WebRtcAec_InitAec_mips(void) {
WebRtcAec_FilterFar = WebRtcAec_FilterFar_mips;
WebRtcAec_FilterAdaptation = WebRtcAec_FilterAdaptation_mips;
WebRtcAec_ScaleErrorSignal = WebRtcAec_ScaleErrorSignal_mips;
- WebRtcAec_ComfortNoise = WebRtcAec_ComfortNoise_mips;
- WebRtcAec_OverdriveAndSuppress = WebRtcAec_OverdriveAndSuppress_mips;
+ WebRtcAec_Overdrive = WebRtcAec_Overdrive_mips;
+ WebRtcAec_Suppress = WebRtcAec_Suppress_mips;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
index c08ee426e61..bc503ba3db0 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
@@ -22,10 +22,8 @@ extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
@@ -376,15 +374,13 @@ static float32x4_t vpowq_f32(float32x4_t a, float32x4_t b) {
return a_exp_b;
}
-static void OverdriveAndSuppressNEON(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
+static void OverdriveNEON(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
int i;
const float32x4_t vec_hNlFb = vmovq_n_f32(hNlFb);
const float32x4_t vec_one = vdupq_n_f32(1.0f);
- const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
- const float32x4_t vec_overDriveSm = vmovq_n_f32(aec->overDriveSm);
+ const float32x4_t vec_overdrive_scaling = vmovq_n_f32(overdrive_scaling);
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
@@ -406,28 +402,12 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
vec_hNl = vreinterpretq_f32_u32(vorrq_u32(vec_if0, vec_if1));
- {
- const float32x4_t vec_overDriveCurve =
- vld1q_f32(&WebRtcAec_overDriveCurve[i]);
- const float32x4_t vec_overDriveSm_overDriveCurve =
- vmulq_f32(vec_overDriveSm, vec_overDriveCurve);
- vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
- vst1q_f32(&hNl[i], vec_hNl);
- }
-
- // Suppress error signal
- {
- float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
- float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
- vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
- vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
-
- // Ooura fft returns incorrect sign on imaginary component. It matters
- // here because we are making an additive change with comfort noise.
- vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
- vst1q_f32(&efw[0][i], vec_efw_re);
- vst1q_f32(&efw[1][i], vec_efw_im);
- }
+ const float32x4_t vec_overDriveCurve =
+ vld1q_f32(&WebRtcAec_overDriveCurve[i]);
+ const float32x4_t vec_overDriveSm_overDriveCurve =
+ vmulq_f32(vec_overdrive_scaling, vec_overDriveCurve);
+ vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
+ vst1q_f32(&hNl[i], vec_hNl);
}
// scalar code for the remaining items.
@@ -438,9 +418,30 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
- // Suppress error signal
+static void SuppressNEON(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ int i;
+ const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
+ // vectorized code (four at once)
+ for (i = 0; i + 3 < PART_LEN1; i += 4) {
+ float32x4_t vec_hNl = vld1q_f32(&hNl[i]);
+ float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
+ float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
+ vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
+
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
+ vst1q_f32(&efw[0][i], vec_efw_re);
+ vst1q_f32(&efw[1][i], vec_efw_im);
+ }
+
+ // scalar code for the remaining items.
+ for (; i < PART_LEN1; i++) {
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -450,7 +451,9 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
}
}
-static int PartitionDelayNEON(const AecCore* aec) {
+static int PartitionDelayNEON(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -459,15 +462,15 @@ static int PartitionDelayNEON(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
float32x4_t vec_wfEn = vdupq_n_f32(0.0f);
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const float32x4_t vec_wfBuf0 = vld1q_f32(&aec->wfBuf[0][pos + j]);
- const float32x4_t vec_wfBuf1 = vld1q_f32(&aec->wfBuf[1][pos + j]);
+ const float32x4_t vec_wfBuf0 = vld1q_f32(&h_fft_buf[0][pos + j]);
+ const float32x4_t vec_wfBuf1 = vld1q_f32(&h_fft_buf[1][pos + j]);
vec_wfEn = vmlaq_f32(vec_wfEn, vec_wfBuf0, vec_wfBuf0);
vec_wfEn = vmlaq_f32(vec_wfEn, vec_wfBuf1, vec_wfBuf1);
}
@@ -483,8 +486,8 @@ static int PartitionDelayNEON(const AecCore* aec) {
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -504,16 +507,19 @@ static int PartitionDelayNEON(const AecCore* aec) {
//
// In addition to updating the PSDs, also the filter diverge state is determined
// upon actions are taken.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectraNEON(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
const float32x4_t vec_15 = vdupq_n_f32(WebRtcAec_kMinFarendPSD);
@@ -527,9 +533,12 @@ static void SmoothedPSD(AecCore* aec,
const float32x4_t vec_efw1 = vld1q_f32(&efw[1][i]);
const float32x4_t vec_xfw0 = vld1q_f32(&xfw[0][i]);
const float32x4_t vec_xfw1 = vld1q_f32(&xfw[1][i]);
- float32x4_t vec_sd = vmulq_n_f32(vld1q_f32(&aec->sd[i]), ptrGCoh[0]);
- float32x4_t vec_se = vmulq_n_f32(vld1q_f32(&aec->se[i]), ptrGCoh[0]);
- float32x4_t vec_sx = vmulq_n_f32(vld1q_f32(&aec->sx[i]), ptrGCoh[0]);
+ float32x4_t vec_sd =
+ vmulq_n_f32(vld1q_f32(&coherence_state->sd[i]), ptrGCoh[0]);
+ float32x4_t vec_se =
+ vmulq_n_f32(vld1q_f32(&coherence_state->se[i]), ptrGCoh[0]);
+ float32x4_t vec_sx =
+ vmulq_n_f32(vld1q_f32(&coherence_state->sx[i]), ptrGCoh[0]);
float32x4_t vec_dfw_sumsq = vmulq_f32(vec_dfw0, vec_dfw0);
float32x4_t vec_efw_sumsq = vmulq_f32(vec_efw0, vec_efw0);
float32x4_t vec_xfw_sumsq = vmulq_f32(vec_xfw0, vec_xfw0);
@@ -542,12 +551,12 @@ static void SmoothedPSD(AecCore* aec,
vec_se = vmlaq_n_f32(vec_se, vec_efw_sumsq, ptrGCoh[1]);
vec_sx = vmlaq_n_f32(vec_sx, vec_xfw_sumsq, ptrGCoh[1]);
- vst1q_f32(&aec->sd[i], vec_sd);
- vst1q_f32(&aec->se[i], vec_se);
- vst1q_f32(&aec->sx[i], vec_sx);
+ vst1q_f32(&coherence_state->sd[i], vec_sd);
+ vst1q_f32(&coherence_state->se[i], vec_se);
+ vst1q_f32(&coherence_state->sx[i], vec_sx);
{
- float32x4x2_t vec_sde = vld2q_f32(&aec->sde[i][0]);
+ float32x4x2_t vec_sde = vld2q_f32(&coherence_state->sde[i][0]);
float32x4_t vec_dfwefw0011 = vmulq_f32(vec_dfw0, vec_efw0);
float32x4_t vec_dfwefw0110 = vmulq_f32(vec_dfw0, vec_efw1);
vec_sde.val[0] = vmulq_n_f32(vec_sde.val[0], ptrGCoh[0]);
@@ -556,11 +565,11 @@ static void SmoothedPSD(AecCore* aec,
vec_dfwefw0110 = vmlsq_f32(vec_dfwefw0110, vec_dfw1, vec_efw0);
vec_sde.val[0] = vmlaq_n_f32(vec_sde.val[0], vec_dfwefw0011, ptrGCoh[1]);
vec_sde.val[1] = vmlaq_n_f32(vec_sde.val[1], vec_dfwefw0110, ptrGCoh[1]);
- vst2q_f32(&aec->sde[i][0], vec_sde);
+ vst2q_f32(&coherence_state->sde[i][0], vec_sde);
}
{
- float32x4x2_t vec_sxd = vld2q_f32(&aec->sxd[i][0]);
+ float32x4x2_t vec_sxd = vld2q_f32(&coherence_state->sxd[i][0]);
float32x4_t vec_dfwxfw0011 = vmulq_f32(vec_dfw0, vec_xfw0);
float32x4_t vec_dfwxfw0110 = vmulq_f32(vec_dfw0, vec_xfw1);
vec_sxd.val[0] = vmulq_n_f32(vec_sxd.val[0], ptrGCoh[0]);
@@ -569,7 +578,7 @@ static void SmoothedPSD(AecCore* aec,
vec_dfwxfw0110 = vmlsq_f32(vec_dfwxfw0110, vec_dfw1, vec_xfw0);
vec_sxd.val[0] = vmlaq_n_f32(vec_sxd.val[0], vec_dfwxfw0011, ptrGCoh[1]);
vec_sxd.val[1] = vmlaq_n_f32(vec_sxd.val[1], vec_dfwxfw0110, ptrGCoh[1]);
- vst2q_f32(&aec->sxd[i][0], vec_sxd);
+ vst2q_f32(&coherence_state->sxd[i][0], vec_sxd);
}
vec_sdSum = vaddq_f32(vec_sdSum, vec_sd);
@@ -593,39 +602,43 @@ static void SmoothedPSD(AecCore* aec,
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -669,30 +682,23 @@ static void StoreAsComplexNEON(const float* data,
data_complex[0][PART_LEN] = data[1];
}
-static void SubbandCoherenceNEON(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherenceNEON(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
+ float* cohxd) {
int i;
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
{
const float32x4_t vec_1eminus10 = vdupq_n_f32(1e-10f);
// Subband coherence
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const float32x4_t vec_sd = vld1q_f32(&aec->sd[i]);
- const float32x4_t vec_se = vld1q_f32(&aec->se[i]);
- const float32x4_t vec_sx = vld1q_f32(&aec->sx[i]);
+ const float32x4_t vec_sd = vld1q_f32(&coherence_state->sd[i]);
+ const float32x4_t vec_se = vld1q_f32(&coherence_state->se[i]);
+ const float32x4_t vec_sx = vld1q_f32(&coherence_state->sx[i]);
const float32x4_t vec_sdse = vmlaq_f32(vec_1eminus10, vec_sd, vec_se);
const float32x4_t vec_sdsx = vmlaq_f32(vec_1eminus10, vec_sd, vec_sx);
- float32x4x2_t vec_sde = vld2q_f32(&aec->sde[i][0]);
- float32x4x2_t vec_sxd = vld2q_f32(&aec->sxd[i][0]);
+ float32x4x2_t vec_sde = vld2q_f32(&coherence_state->sde[i][0]);
+ float32x4x2_t vec_sxd = vld2q_f32(&coherence_state->sxd[i][0]);
float32x4_t vec_cohde = vmulq_f32(vec_sde.val[0], vec_sde.val[0]);
float32x4_t vec_cohxd = vmulq_f32(vec_sxd.val[0], vec_sxd.val[0]);
vec_cohde = vmlaq_f32(vec_cohde, vec_sde.val[1], vec_sde.val[1]);
@@ -706,12 +712,12 @@ static void SubbandCoherenceNEON(AecCore* aec,
}
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
@@ -719,8 +725,10 @@ void WebRtcAec_InitAec_neon(void) {
WebRtcAec_FilterFar = FilterFarNEON;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignalNEON;
WebRtcAec_FilterAdaptation = FilterAdaptationNEON;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressNEON;
- WebRtcAec_SubbandCoherence = SubbandCoherenceNEON;
+ WebRtcAec_Overdrive = OverdriveNEON;
+ WebRtcAec_Suppress = SuppressNEON;
+ WebRtcAec_ComputeCoherence = ComputeCoherenceNEON;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectraNEON;
WebRtcAec_StoreAsComplex = StoreAsComplexNEON;
WebRtcAec_PartitionDelay = PartitionDelayNEON;
WebRtcAec_WindowData = WindowDataNEON;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h
new file mode 100644
index 00000000000..d1fb6e892a7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
+
+#include <memory>
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+typedef void (*WebRtcAecFilterFar)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]);
+extern WebRtcAecFilterFar WebRtcAec_FilterFar;
+typedef void (*WebRtcAecScaleErrorSignal)(float mu,
+ float error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]);
+extern WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
+typedef void (*WebRtcAecFilterAdaptation)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
+extern WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
+
+typedef void (*WebRtcAecOverdrive)(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]);
+extern WebRtcAecOverdrive WebRtcAec_Overdrive;
+
+typedef void (*WebRtcAecSuppress)(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]);
+extern WebRtcAecSuppress WebRtcAec_Suppress;
+
+typedef void (*WebRtcAecComputeCoherence)(const CoherenceState* coherence_state,
+ float* cohde,
+ float* cohxd);
+extern WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+
+typedef void (*WebRtcAecUpdateCoherenceSpectra)(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence);
+extern WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
+
+typedef int (*WebRtcAecPartitionDelay)(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
+extern WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
+
+typedef void (*WebRtcAecStoreAsComplex)(const float* data,
+ float data_complex[2][PART_LEN1]);
+extern WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
+
+typedef void (*WebRtcAecWindowData)(float* x_windowed, const float* x);
+extern WebRtcAecWindowData WebRtcAec_WindowData;
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
index c1a6e3de75b..47ba12f419e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
@@ -20,10 +20,8 @@ extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
@@ -377,15 +375,13 @@ static __m128 mm_pow_ps(__m128 a, __m128 b) {
return a_exp_b;
}
-static void OverdriveAndSuppressSSE2(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
+static void OverdriveSSE2(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
int i;
const __m128 vec_hNlFb = _mm_set1_ps(hNlFb);
const __m128 vec_one = _mm_set1_ps(1.0f);
- const __m128 vec_minus_one = _mm_set1_ps(-1.0f);
- const __m128 vec_overDriveSm = _mm_set1_ps(aec->overDriveSm);
+ const __m128 vec_overdrive_scaling = _mm_set1_ps(overdrive_scaling);
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
// Weight subbands
@@ -401,28 +397,12 @@ static void OverdriveAndSuppressSSE2(AecCore* aec,
bigger, _mm_add_ps(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl));
vec_hNl = _mm_or_ps(vec_if0, vec_if1);
- {
- const __m128 vec_overDriveCurve =
- _mm_loadu_ps(&WebRtcAec_overDriveCurve[i]);
- const __m128 vec_overDriveSm_overDriveCurve =
- _mm_mul_ps(vec_overDriveSm, vec_overDriveCurve);
- vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve);
- _mm_storeu_ps(&hNl[i], vec_hNl);
- }
-
- // Suppress error signal
- {
- __m128 vec_efw_re = _mm_loadu_ps(&efw[0][i]);
- __m128 vec_efw_im = _mm_loadu_ps(&efw[1][i]);
- vec_efw_re = _mm_mul_ps(vec_efw_re, vec_hNl);
- vec_efw_im = _mm_mul_ps(vec_efw_im, vec_hNl);
-
- // Ooura fft returns incorrect sign on imaginary component. It matters
- // here because we are making an additive change with comfort noise.
- vec_efw_im = _mm_mul_ps(vec_efw_im, vec_minus_one);
- _mm_storeu_ps(&efw[0][i], vec_efw_re);
- _mm_storeu_ps(&efw[1][i], vec_efw_im);
- }
+ const __m128 vec_overDriveCurve =
+ _mm_loadu_ps(&WebRtcAec_overDriveCurve[i]);
+ const __m128 vec_overDriveSm_overDriveCurve =
+ _mm_mul_ps(vec_overdrive_scaling, vec_overDriveCurve);
+ vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve);
+ _mm_storeu_ps(&hNl[i], vec_hNl);
}
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
@@ -431,8 +411,30 @@ static void OverdriveAndSuppressSSE2(AecCore* aec,
hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+
+static void SuppressSSE2(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ int i;
+ const __m128 vec_minus_one = _mm_set1_ps(-1.0f);
+ // vectorized code (four at once)
+ for (i = 0; i + 3 < PART_LEN1; i += 4) {
+ // Suppress error signal
+ __m128 vec_hNl = _mm_loadu_ps(&hNl[i]);
+ __m128 vec_efw_re = _mm_loadu_ps(&efw[0][i]);
+ __m128 vec_efw_im = _mm_loadu_ps(&efw[1][i]);
+ vec_efw_re = _mm_mul_ps(vec_efw_re, vec_hNl);
+ vec_efw_im = _mm_mul_ps(vec_efw_im, vec_hNl);
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ vec_efw_im = _mm_mul_ps(vec_efw_im, vec_minus_one);
+ _mm_storeu_ps(&efw[0][i], vec_efw_re);
+ _mm_storeu_ps(&efw[1][i], vec_efw_im);
+ }
+ // scalar code for the remaining items.
+ for (; i < PART_LEN1; i++) {
// Suppress error signal
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -451,7 +453,9 @@ __inline static void _mm_add_ps_4x1(__m128 sum, float* dst) {
_mm_store_ss(dst, sum);
}
-static int PartitionDelaySSE2(const AecCore* aec) {
+static int PartitionDelaySSE2(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -460,15 +464,15 @@ static int PartitionDelaySSE2(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
__m128 vec_wfEn = _mm_set1_ps(0.0f);
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const __m128 vec_wfBuf0 = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
- const __m128 vec_wfBuf1 = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
+ const __m128 vec_wfBuf0 = _mm_loadu_ps(&h_fft_buf[0][pos + j]);
+ const __m128 vec_wfBuf1 = _mm_loadu_ps(&h_fft_buf[1][pos + j]);
vec_wfEn = _mm_add_ps(vec_wfEn, _mm_mul_ps(vec_wfBuf0, vec_wfBuf0));
vec_wfEn = _mm_add_ps(vec_wfEn, _mm_mul_ps(vec_wfBuf1, vec_wfBuf1));
}
@@ -476,8 +480,8 @@ static int PartitionDelaySSE2(const AecCore* aec) {
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -497,16 +501,19 @@ static int PartitionDelaySSE2(const AecCore* aec) {
//
// In addition to updating the PSDs, also the filter diverge state is determined
// upon actions are taken.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectraSSE2(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
const __m128 vec_15 = _mm_set1_ps(WebRtcAec_kMinFarendPSD);
@@ -522,9 +529,12 @@ static void SmoothedPSD(AecCore* aec,
const __m128 vec_efw1 = _mm_loadu_ps(&efw[1][i]);
const __m128 vec_xfw0 = _mm_loadu_ps(&xfw[0][i]);
const __m128 vec_xfw1 = _mm_loadu_ps(&xfw[1][i]);
- __m128 vec_sd = _mm_mul_ps(_mm_loadu_ps(&aec->sd[i]), vec_GCoh0);
- __m128 vec_se = _mm_mul_ps(_mm_loadu_ps(&aec->se[i]), vec_GCoh0);
- __m128 vec_sx = _mm_mul_ps(_mm_loadu_ps(&aec->sx[i]), vec_GCoh0);
+ __m128 vec_sd =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->sd[i]), vec_GCoh0);
+ __m128 vec_se =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->se[i]), vec_GCoh0);
+ __m128 vec_sx =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->sx[i]), vec_GCoh0);
__m128 vec_dfw_sumsq = _mm_mul_ps(vec_dfw0, vec_dfw0);
__m128 vec_efw_sumsq = _mm_mul_ps(vec_efw0, vec_efw0);
__m128 vec_xfw_sumsq = _mm_mul_ps(vec_xfw0, vec_xfw0);
@@ -535,13 +545,13 @@ static void SmoothedPSD(AecCore* aec,
vec_sd = _mm_add_ps(vec_sd, _mm_mul_ps(vec_dfw_sumsq, vec_GCoh1));
vec_se = _mm_add_ps(vec_se, _mm_mul_ps(vec_efw_sumsq, vec_GCoh1));
vec_sx = _mm_add_ps(vec_sx, _mm_mul_ps(vec_xfw_sumsq, vec_GCoh1));
- _mm_storeu_ps(&aec->sd[i], vec_sd);
- _mm_storeu_ps(&aec->se[i], vec_se);
- _mm_storeu_ps(&aec->sx[i], vec_sx);
+ _mm_storeu_ps(&coherence_state->sd[i], vec_sd);
+ _mm_storeu_ps(&coherence_state->se[i], vec_se);
+ _mm_storeu_ps(&coherence_state->sx[i], vec_sx);
{
- const __m128 vec_3210 = _mm_loadu_ps(&aec->sde[i][0]);
- const __m128 vec_7654 = _mm_loadu_ps(&aec->sde[i + 2][0]);
+ const __m128 vec_3210 = _mm_loadu_ps(&coherence_state->sde[i][0]);
+ const __m128 vec_7654 = _mm_loadu_ps(&coherence_state->sde[i + 2][0]);
__m128 vec_a =
_mm_shuffle_ps(vec_3210, vec_7654, _MM_SHUFFLE(2, 0, 2, 0));
__m128 vec_b =
@@ -556,13 +566,14 @@ static void SmoothedPSD(AecCore* aec,
_mm_sub_ps(vec_dfwefw0110, _mm_mul_ps(vec_dfw1, vec_efw0));
vec_a = _mm_add_ps(vec_a, _mm_mul_ps(vec_dfwefw0011, vec_GCoh1));
vec_b = _mm_add_ps(vec_b, _mm_mul_ps(vec_dfwefw0110, vec_GCoh1));
- _mm_storeu_ps(&aec->sde[i][0], _mm_unpacklo_ps(vec_a, vec_b));
- _mm_storeu_ps(&aec->sde[i + 2][0], _mm_unpackhi_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sde[i][0], _mm_unpacklo_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sde[i + 2][0],
+ _mm_unpackhi_ps(vec_a, vec_b));
}
{
- const __m128 vec_3210 = _mm_loadu_ps(&aec->sxd[i][0]);
- const __m128 vec_7654 = _mm_loadu_ps(&aec->sxd[i + 2][0]);
+ const __m128 vec_3210 = _mm_loadu_ps(&coherence_state->sxd[i][0]);
+ const __m128 vec_7654 = _mm_loadu_ps(&coherence_state->sxd[i + 2][0]);
__m128 vec_a =
_mm_shuffle_ps(vec_3210, vec_7654, _MM_SHUFFLE(2, 0, 2, 0));
__m128 vec_b =
@@ -577,8 +588,9 @@ static void SmoothedPSD(AecCore* aec,
_mm_sub_ps(vec_dfwxfw0110, _mm_mul_ps(vec_dfw1, vec_xfw0));
vec_a = _mm_add_ps(vec_a, _mm_mul_ps(vec_dfwxfw0011, vec_GCoh1));
vec_b = _mm_add_ps(vec_b, _mm_mul_ps(vec_dfwxfw0110, vec_GCoh1));
- _mm_storeu_ps(&aec->sxd[i][0], _mm_unpacklo_ps(vec_a, vec_b));
- _mm_storeu_ps(&aec->sxd[i + 2][0], _mm_unpackhi_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sxd[i][0], _mm_unpacklo_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sxd[i + 2][0],
+ _mm_unpackhi_ps(vec_a, vec_b));
}
vec_sdSum = _mm_add_ps(vec_sdSum, vec_sd);
@@ -589,39 +601,43 @@ static void SmoothedPSD(AecCore* aec,
_mm_add_ps_4x1(vec_seSum, &seSum);
for (; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -668,34 +684,27 @@ static void StoreAsComplexSSE2(const float* data,
data_complex[0][PART_LEN] = data[1];
}
-static void SubbandCoherenceSSE2(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherenceSSE2(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
+ float* cohxd) {
int i;
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
{
const __m128 vec_1eminus10 = _mm_set1_ps(1e-10f);
// Subband coherence
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const __m128 vec_sd = _mm_loadu_ps(&aec->sd[i]);
- const __m128 vec_se = _mm_loadu_ps(&aec->se[i]);
- const __m128 vec_sx = _mm_loadu_ps(&aec->sx[i]);
+ const __m128 vec_sd = _mm_loadu_ps(&coherence_state->sd[i]);
+ const __m128 vec_se = _mm_loadu_ps(&coherence_state->se[i]);
+ const __m128 vec_sx = _mm_loadu_ps(&coherence_state->sx[i]);
const __m128 vec_sdse =
_mm_add_ps(vec_1eminus10, _mm_mul_ps(vec_sd, vec_se));
const __m128 vec_sdsx =
_mm_add_ps(vec_1eminus10, _mm_mul_ps(vec_sd, vec_sx));
- const __m128 vec_sde_3210 = _mm_loadu_ps(&aec->sde[i][0]);
- const __m128 vec_sde_7654 = _mm_loadu_ps(&aec->sde[i + 2][0]);
- const __m128 vec_sxd_3210 = _mm_loadu_ps(&aec->sxd[i][0]);
- const __m128 vec_sxd_7654 = _mm_loadu_ps(&aec->sxd[i + 2][0]);
+ const __m128 vec_sde_3210 = _mm_loadu_ps(&coherence_state->sde[i][0]);
+ const __m128 vec_sde_7654 = _mm_loadu_ps(&coherence_state->sde[i + 2][0]);
+ const __m128 vec_sxd_3210 = _mm_loadu_ps(&coherence_state->sxd[i][0]);
+ const __m128 vec_sxd_7654 = _mm_loadu_ps(&coherence_state->sxd[i + 2][0]);
const __m128 vec_sde_0 =
_mm_shuffle_ps(vec_sde_3210, vec_sde_7654, _MM_SHUFFLE(2, 0, 2, 0));
const __m128 vec_sde_1 =
@@ -716,12 +725,12 @@ static void SubbandCoherenceSSE2(AecCore* aec,
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
}
@@ -730,8 +739,10 @@ void WebRtcAec_InitAec_SSE2(void) {
WebRtcAec_FilterFar = FilterFarSSE2;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignalSSE2;
WebRtcAec_FilterAdaptation = FilterAdaptationSSE2;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
- WebRtcAec_SubbandCoherence = SubbandCoherenceSSE2;
+ WebRtcAec_Overdrive = OverdriveSSE2;
+ WebRtcAec_Suppress = SuppressSSE2;
+ WebRtcAec_ComputeCoherence = ComputeCoherenceSSE2;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectraSSE2;
WebRtcAec_StoreAsComplex = StoreAsComplexSSE2;
WebRtcAec_PartitionDelay = PartitionDelaySSE2;
WebRtcAec_WindowData = WindowDataSSE2;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc
index 03efc103ea6..690fe9f34f9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc
@@ -581,9 +581,5 @@ void aec_rdft_init(void) {
#endif
#if defined(WEBRTC_HAS_NEON)
aec_rdft_init_neon();
-#elif defined(WEBRTC_DETECT_NEON)
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- aec_rdft_init_neon();
- }
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
index 18eb7a5c3f3..d83eb27d83f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
@@ -54,7 +54,7 @@ void aec_rdft_inverse_128(float* a);
#if defined(MIPS_FPU_LE)
void aec_rdft_init_mips(void);
#endif
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void aec_rdft_init_neon(void);
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc
index 7e64e657167..7e64e657167 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc
index 43b6a68cd72..43b6a68cd72 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc
index b4e453ff53e..b4e453ff53e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
index f963a4e1d30..716da38ab88 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
@@ -14,9 +14,6 @@
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include <math.h>
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-#include <stdio.h>
-#endif
#include <stdlib.h>
#include <string.h>
@@ -26,7 +23,7 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/aec/aec_core.h"
#include "webrtc/modules/audio_processing/aec/aec_resampler.h"
-#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -99,9 +96,7 @@ static const int kMaxBufSizeStart = 62; // In partitions
static const int sampMsNb = 8; // samples per ms in nb
static const int initCheck = 42;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-int webrtc_aec_instance_count = 0;
-#endif
+int Aec::instance_count = 0;
// Estimates delay to set the position of the far-end buffer read pointer
// (controlled by knownDelay)
@@ -123,13 +118,14 @@ static void ProcessExtended(Aec* self,
int32_t skew);
void* WebRtcAec_Create() {
- Aec* aecpc = reinterpret_cast<Aec*>(malloc(sizeof(Aec)));
+ Aec* aecpc = new Aec();
if (!aecpc) {
return NULL;
}
+ aecpc->data_dumper.reset(new ApmDataDumper(aecpc->instance_count));
- aecpc->aec = WebRtcAec_CreateAec();
+ aecpc->aec = WebRtcAec_CreateAec(aecpc->instance_count);
if (!aecpc->aec) {
WebRtcAec_Free(aecpc);
return NULL;
@@ -151,22 +147,7 @@ void* WebRtcAec_Create() {
aecpc->initFlag = 0;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- char filename[64];
- snprintf(filename, sizeof(filename), "aec_buf%d.dat",
- webrtc_aec_instance_count);
- aecpc->bufFile = fopen(filename, "wb");
- snprintf(filename, sizeof(filename), "aec_skew%d.dat",
- webrtc_aec_instance_count);
- aecpc->skewFile = fopen(filename, "wb");
- snprintf(filename, sizeof(filename), "aec_delay%d.dat",
- webrtc_aec_instance_count);
- aecpc->delayFile = fopen(filename, "wb");
- webrtc_aec_instance_count++;
- }
-#endif
-
+ aecpc->instance_count++;
return aecpc;
}
@@ -179,19 +160,14 @@ void WebRtcAec_Free(void* aecInst) {
WebRtc_FreeBuffer(aecpc->far_pre_buf);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- fclose(aecpc->bufFile);
- fclose(aecpc->skewFile);
- fclose(aecpc->delayFile);
-#endif
-
WebRtcAec_FreeAec(aecpc->aec);
WebRtcAec_FreeResampler(aecpc->resampler);
- free(aecpc);
+ delete aecpc;
}
int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
Aec* aecpc = reinterpret_cast<Aec*>(aecInst);
+ aecpc->data_dumper->InitiateNewSetOfRecordings();
AecConfig aecConfig;
if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000 &&
@@ -376,15 +352,9 @@ int32_t WebRtcAec_Process(void* aecInst,
msInSndCardBuf, skew);
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- int16_t far_buf_size_ms = (int16_t)(WebRtcAec_system_delay(aecpc->aec) /
- (sampMsNb * aecpc->rate_factor));
- (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile);
- (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1,
- aecpc->delayFile);
- }
-#endif
+ int far_buf_size_samples = WebRtcAec_system_delay(aecpc->aec);
+ aecpc->data_dumper->DumpRaw("aec_system_delay", 1, &far_buf_size_samples);
+ aecpc->data_dumper->DumpRaw("aec_known_delay", 1, &aecpc->knownDelay);
return retVal;
}
@@ -603,9 +573,7 @@ static int ProcessNormal(Aec* aecpc,
aecpc->skew = maxSkewEst;
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
-#endif
+ aecpc->data_dumper->DumpRaw("aec_skew", 1, &aecpc->skew);
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
index f4b1f20ab75..8e5e52c32bf 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
@@ -11,8 +11,14 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
+#include <memory>
+
#include <stddef.h>
+extern "C" {
+#include "webrtc/common_audio/ring_buffer.h"
+}
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -56,6 +62,54 @@ typedef struct {
struct AecCore;
+class ApmDataDumper;
+
+typedef struct Aec {
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ int delayCtr;
+ int sampFreq;
+ int splitSampFreq;
+ int scSampFreq;
+ float sampFactor; // scSampRate / sampFreq
+ short skewMode;
+ int bufSizeStart;
+ int knownDelay;
+ int rate_factor;
+
+ short initFlag; // indicates if AEC has been initialized
+
+ // Variables used for averaging far end buffer size
+ short counter;
+ int sum;
+ short firstVal;
+ short checkBufSizeCtr;
+
+ // Variables used for delay shifts
+ short msInSndCardBuf;
+ short filtDelay; // Filtered delay estimate.
+ int timeForDelayChange;
+ int startup_phase;
+ int checkBuffSize;
+ short lastDelayDiff;
+
+ // Structures
+ void* resampler;
+
+ int skewFrCtr;
+ int resample; // if the skew is small enough we don't resample
+ int highSkewCtr;
+ float skew;
+
+ RingBuffer* far_pre_buf; // Time domain far-end pre-buffer.
+
+ int farend_started;
+
+ // Aec instance counter.
+ static int instance_count;
+ AecCore* aec;
+} Aec;
+
/*
* Allocates the memory needed by the AEC. The memory needs to be initialized
* separately using the WebRtcAec_Init() function. Returns a pointer to the
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
deleted file mode 100644
index b4a6fd8390e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
-
-extern "C" {
-#include "webrtc/common_audio/ring_buffer.h"
-}
-#include "webrtc/modules/audio_processing/aec/aec_core.h"
-
-namespace webrtc {
-
-typedef struct {
- int delayCtr;
- int sampFreq;
- int splitSampFreq;
- int scSampFreq;
- float sampFactor; // scSampRate / sampFreq
- short skewMode;
- int bufSizeStart;
- int knownDelay;
- int rate_factor;
-
- short initFlag; // indicates if AEC has been initialized
-
- // Variables used for averaging far end buffer size
- short counter;
- int sum;
- short firstVal;
- short checkBufSizeCtr;
-
- // Variables used for delay shifts
- short msInSndCardBuf;
- short filtDelay; // Filtered delay estimate.
- int timeForDelayChange;
- int startup_phase;
- int checkBuffSize;
- short lastDelayDiff;
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- FILE* bufFile;
- FILE* delayFile;
- FILE* skewFile;
-#endif
-
- // Structures
- void* resampler;
-
- int skewFrCtr;
- int resample; // if the skew is small enough we don't resample
- int highSkewCtr;
- float skew;
-
- RingBuffer* far_pre_buf; // Time domain far-end pre-buffer.
-
- int farend_started;
-
- AecCore* aec;
-} Aec;
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
index be145898b59..51a4df2b782 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
@@ -10,7 +10,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_processing/aec/aec_core.h"
-#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc
index 6bf1cf7f3ef..a17220dbd76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc
@@ -14,12 +14,16 @@
#include <stddef.h>
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
+}
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
+extern "C" {
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+}
+
#include "webrtc/typedefs.h"
#ifdef AEC_DEBUG
@@ -208,7 +212,7 @@ StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
AecmCore* WebRtcAecm_CreateCore() {
- AecmCore* aecm = malloc(sizeof(AecmCore));
+ AecmCore* aecm = static_cast<AecmCore*>(malloc(sizeof(AecmCore)));
aecm->farFrameBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN,
sizeof(int16_t));
@@ -361,7 +365,7 @@ static void ResetAdaptiveChannelC(AecmCore* aecm) {
}
// Initialize function pointers for ARM Neon platform.
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
static void WebRtcAecm_InitNeon(void)
{
WebRtcAecm_StoreAdaptiveChannel = WebRtcAecm_StoreAdaptiveChannelNeon;
@@ -501,20 +505,14 @@ int WebRtcAecm_InitCore(AecmCore* const aecm, int samplingFreq) {
// Assert a preprocessor definition at compile-time. It's an assumption
// used in assembly code, so check the assembly files before any change.
- COMPILE_ASSERT(PART_LEN % 16 == 0);
+ static_assert(PART_LEN % 16 == 0, "PART_LEN is not a multiple of 16");
// Initialize function pointers.
WebRtcAecm_CalcLinearEnergies = CalcLinearEnergiesC;
WebRtcAecm_StoreAdaptiveChannel = StoreAdaptiveChannelC;
WebRtcAecm_ResetAdaptiveChannel = ResetAdaptiveChannelC;
-#ifdef WEBRTC_DETECT_NEON
- uint64_t features = WebRtc_GetCPUFeaturesARM();
- if ((features & kCPUFeatureNEON) != 0)
- {
- WebRtcAecm_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcAecm_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
index b52bb62d2de..33d80889aa4 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
@@ -13,8 +13,10 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+}
#include "webrtc/modules/audio_processing/aecm/aecm_defines.h"
#include "webrtc/typedefs.h"
@@ -400,7 +402,7 @@ extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file aecm_core.c, while those for ARM Neon platforms
// are declared below and defined in file aecm_core_neon.c.
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc
index 3a8fafa4ece..57f859f550d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc
@@ -14,19 +14,18 @@
#include <stddef.h>
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
+}
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
+extern "C" {
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+}
#include "webrtc/typedefs.h"
// Square root of Hanning window in Q14.
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
-// Table is defined in an ARM assembly file.
-extern const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END;
-#else
static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224,
@@ -37,7 +36,6 @@ static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034,
16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384
};
-#endif
#ifdef AECM_WITH_ABS_APPROX
//Q15 alpha = 0.99439986968132 const Factor for magnitude approximation
@@ -768,4 +766,3 @@ static void ComfortNoise(AecmCore* aecm,
out[i].imag = WebRtcSpl_AddSatW16(out[i].imag, uImag[i]);
}
}
-
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc
index 3ca9982ebfa..e625a46ec5f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc
@@ -1563,4 +1563,3 @@ static void ComfortNoise(AecmCore* aecm,
sgn = ((int)tt) >> 31;
out[PART_LEN].imag = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn);
}
-
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc
index 1751fcf7ada..81c7667d981 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc
@@ -18,19 +18,6 @@
// TODO(kma): Re-write the corresponding assembly file, the offset
// generating script and makefile, to replace these C functions.
-// Square root of Hanning window in Q14.
-const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
- 0,
- 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
- 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224,
- 6591, 6954, 7313, 7668, 8019, 8364, 8705, 9040,
- 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514,
- 11795, 12068, 12335, 12594, 12845, 13089, 13325, 13553,
- 13773, 13985, 14189, 14384, 14571, 14749, 14918, 15079,
- 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034,
- 16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384
-};
-
static inline void AddLanes(uint32_t* ptr, uint32x4_t v) {
#if defined(WEBRTC_ARCH_ARM64)
*(ptr) = vaddvq_u32(v);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc
index 91e6f0e80ce..a81466e678e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc
@@ -15,8 +15,10 @@
#endif
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+}
#include "webrtc/modules/audio_processing/aecm/aecm_core.h"
#define BUF_SIZE_FRAMES 50 // buffer size (frames)
@@ -79,7 +81,7 @@ static int WebRtcAecm_EstBufDelay(AecMobile* aecmInst, short msInSndCardBuf);
static int WebRtcAecm_DelayComp(AecMobile* aecmInst);
void* WebRtcAecm_Create() {
- AecMobile* aecm = malloc(sizeof(AecMobile));
+ AecMobile* aecm = static_cast<AecMobile*>(malloc(sizeof(AecMobile)));
WebRtcSpl_Init();
@@ -114,7 +116,7 @@ void* WebRtcAecm_Create() {
}
void WebRtcAecm_Free(void* aecmInst) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL) {
return;
@@ -138,7 +140,7 @@ void WebRtcAecm_Free(void* aecmInst) {
int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
AecmConfig aecConfig;
if (aecm == NULL)
@@ -196,7 +198,7 @@ int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
// farend signal.
int32_t WebRtcAecm_GetBufferFarendError(void *aecmInst, const int16_t *farend,
size_t nrOfSamples) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL)
return -1;
@@ -216,7 +218,7 @@ int32_t WebRtcAecm_GetBufferFarendError(void *aecmInst, const int16_t *farend,
int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
size_t nrOfSamples) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
const int32_t err =
WebRtcAecm_GetBufferFarendError(aecmInst, farend, nrOfSamples);
@@ -239,7 +241,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
const int16_t *nearendClean, int16_t *out,
size_t nrOfSamples, int16_t msInSndCardBuf)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
int32_t retVal = 0;
size_t i;
short nmbrOfFilledBuffers;
@@ -435,7 +437,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL)
{
@@ -516,8 +518,8 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
const void* echo_path,
size_t size_bytes)
{
- AecMobile* aecm = aecmInst;
- const int16_t* echo_path_ptr = echo_path;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
+ const int16_t* echo_path_ptr = static_cast<const int16_t*>(echo_path);
if (aecmInst == NULL) {
return -1;
@@ -544,8 +546,8 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
void* echo_path,
size_t size_bytes)
{
- AecMobile* aecm = aecmInst;
- int16_t* echo_path_ptr = echo_path;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
+ int16_t* echo_path_ptr = static_cast<int16_t*>(echo_path);
if (aecmInst == NULL) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
index 264f3e5befb..e6eab36a01e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
@@ -9,6 +9,8 @@
{
'variables': {
'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
+ # Outputs some low-level debug files.
+ 'aec_debug_dump%': 0,
},
'targets': [
{
@@ -16,7 +18,6 @@
'type': 'static_library',
'variables': {
# Outputs some low-level debug files.
- 'aec_debug_dump%': 0,
'agc_debug_dump%': 0,
# Disables the usual mode where we trust the reported system delay
@@ -34,17 +35,16 @@
'sources': [
'aec/aec_core.cc',
'aec/aec_core.h',
- 'aec/aec_core_internal.h',
- 'aec/aec_rdft.c',
+ 'aec/aec_core_optimized_methods.h',
+ 'aec/aec_rdft.cc',
'aec/aec_rdft.h',
'aec/aec_resampler.cc',
'aec/aec_resampler.h',
'aec/echo_cancellation.cc',
- 'aec/echo_cancellation_internal.h',
'aec/echo_cancellation.h',
- 'aecm/aecm_core.c',
+ 'aecm/aecm_core.cc',
'aecm/aecm_core.h',
- 'aecm/echo_control_mobile.c',
+ 'aecm/echo_control_mobile.cc',
'aecm/echo_control_mobile.h',
'agc/agc.cc',
'agc/agc.h',
@@ -91,9 +91,8 @@
'intelligibility/intelligibility_utils.h',
'level_estimator_impl.cc',
'level_estimator_impl.h',
- 'logging/aec_logging.h',
- 'logging/aec_logging_file_handling.cc',
- 'logging/aec_logging_file_handling.h',
+ 'logging/apm_data_dumper.cc',
+ 'logging/apm_data_dumper.h',
'noise_suppression_impl.cc',
'noise_suppression_impl.h',
'render_queue_item_verifier.h',
@@ -120,10 +119,10 @@
'typing_detection.h',
'utility/block_mean_calculator.cc',
'utility/block_mean_calculator.h',
- 'utility/delay_estimator.c',
+ 'utility/delay_estimator.cc',
'utility/delay_estimator.h',
'utility/delay_estimator_internal.h',
- 'utility/delay_estimator_wrapper.c',
+ 'utility/delay_estimator_wrapper.cc',
'utility/delay_estimator_wrapper.h',
'vad/common.h',
'vad/gmm.cc',
@@ -150,7 +149,9 @@
],
'conditions': [
['aec_debug_dump==1', {
- 'defines': ['WEBRTC_AEC_DEBUG_DUMP',],
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }, {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
}],
['aec_untrusted_delay_for_testing==1', {
'defines': ['WEBRTC_UNTRUSTED_DELAY',],
@@ -201,19 +202,19 @@
}],
['target_arch=="mipsel" and mips_arch_variant!="r6"', {
'sources': [
- 'aecm/aecm_core_mips.c',
+ 'aecm/aecm_core_mips.cc',
],
'conditions': [
['mips_float_abi=="hard"', {
'sources': [
'aec/aec_core_mips.cc',
- 'aec/aec_rdft_mips.c',
+ 'aec/aec_rdft_mips.cc',
],
}],
],
}, {
'sources': [
- 'aecm/aecm_core_c.c',
+ 'aecm/aecm_core_c.cc',
],
}],
],
@@ -246,9 +247,14 @@
'type': 'static_library',
'sources': [
'aec/aec_core_sse2.cc',
- 'aec/aec_rdft_sse2.c',
+ 'aec/aec_rdft_sse2.cc',
],
'conditions': [
+ ['aec_debug_dump==1', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }, {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
+ }],
['os_posix==1', {
'cflags': [ '-msse2', ],
'xcode_settings': {
@@ -269,10 +275,18 @@
],
'sources': [
'aec/aec_core_neon.cc',
- 'aec/aec_rdft_neon.c',
- 'aecm/aecm_core_neon.c',
+ 'aec/aec_rdft_neon.cc',
+ 'aecm/aecm_core_neon.cc',
'ns/nsx_core_neon.c',
],
+ 'conditions': [
+ ['aec_debug_dump==1', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }],
+ ['aec_debug_dump==0', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
+ }],
+ ],
}],
}],
],
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
index 2b6f1c4ac19..e75b3280346 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -94,7 +94,7 @@ bool is_multi_band(int sample_rate_hz) {
sample_rate_hz == AudioProcessing::kSampleRate48kHz;
}
-int ClosestNativeRate(int min_proc_rate) {
+int ClosestHigherNativeRate(int min_proc_rate) {
for (int rate : AudioProcessing::kNativeSampleRatesHz) {
if (rate >= min_proc_rate) {
return rate;
@@ -163,12 +163,10 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config,
private_submodules_(new ApmPrivateSubmodules(beamformer)),
constants_(config.Get<ExperimentalAgc>().startup_min_volume,
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
- false,
+ false),
#else
- config.Get<ExperimentalAgc>().enabled,
+ config.Get<ExperimentalAgc>().enabled),
#endif
- config.Get<Intelligibility>().enabled),
-
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
capture_(false,
#else
@@ -176,7 +174,8 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config,
#endif
config.Get<Beamforming>().array_geometry,
config.Get<Beamforming>().target_direction),
- capture_nonlocked_(config.Get<Beamforming>().enabled)
+ capture_nonlocked_(config.Get<Beamforming>().enabled,
+ config.Get<Intelligibility>().enabled)
{
{
rtc::CritScope cs_render(&crit_render_);
@@ -362,22 +361,24 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
formats_.api_format = config;
- capture_nonlocked_.fwd_proc_format = StreamConfig(ClosestNativeRate(std::min(
- formats_.api_format.input_stream().sample_rate_hz(),
- formats_.api_format.output_stream().sample_rate_hz())));
+ capture_nonlocked_.fwd_proc_format = StreamConfig(ClosestHigherNativeRate(
+ std::min(formats_.api_format.input_stream().sample_rate_hz(),
+ formats_.api_format.output_stream().sample_rate_hz())));
- // We normally process the reverse stream at 16 kHz. Unless...
- int rev_proc_rate = kSampleRate16kHz;
+ int rev_proc_rate = ClosestHigherNativeRate(std::min(
+ formats_.api_format.reverse_input_stream().sample_rate_hz(),
+ formats_.api_format.reverse_output_stream().sample_rate_hz()));
+ // TODO(aluebs): Remove this restriction once we figure out why the 3-band
+ // splitting filter degrades the AEC performance.
+ if (rev_proc_rate > kSampleRate32kHz) {
+ rev_proc_rate = is_rev_processed() ? kSampleRate32kHz : kSampleRate16kHz;
+ }
+ // If the forward sample rate is 8 kHz, the reverse stream is also processed
+ // at this rate.
if (capture_nonlocked_.fwd_proc_format.sample_rate_hz() == kSampleRate8kHz) {
- // ...the forward stream is at 8 kHz.
rev_proc_rate = kSampleRate8kHz;
} else {
- if (formats_.api_format.reverse_input_stream().sample_rate_hz() ==
- kSampleRate32kHz) {
- // ...or the input is at 32 kHz, in which case we use the splitting
- // filter rather than the resampler.
- rev_proc_rate = kSampleRate32kHz;
- }
+ rev_proc_rate = std::max(rev_proc_rate, static_cast<int>(kSampleRate16kHz));
}
// Always downmix the reverse stream to mono for analysis. This has been
@@ -409,6 +410,13 @@ void AudioProcessingImpl::SetExtraOptions(const Config& config) {
InitializeTransient();
}
+ if(capture_nonlocked_.intelligibility_enabled !=
+ config.Get<Intelligibility>().enabled) {
+ capture_nonlocked_.intelligibility_enabled =
+ config.Get<Intelligibility>().enabled;
+ InitializeIntelligibility();
+ }
+
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
if (capture_nonlocked_.beamformer_enabled !=
config.Get<Beamforming>().enabled) {
@@ -702,10 +710,13 @@ int AudioProcessingImpl::ProcessStreamLocked() {
ca->CopyLowPassToReference();
}
public_submodules_->noise_suppression->ProcessCaptureAudio(ca);
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
RTC_DCHECK(public_submodules_->noise_suppression->is_enabled());
+ int gain_db = public_submodules_->gain_control->is_enabled() ?
+ public_submodules_->gain_control->compression_gain_db() :
+ 0;
public_submodules_->intelligibility_enhancer->SetCaptureNoiseEstimate(
- public_submodules_->noise_suppression->NoiseEstimate());
+ public_submodules_->noise_suppression->NoiseEstimate(), gain_db);
}
// Ensure that the stream delay was set before the call to the
@@ -898,7 +909,7 @@ int AudioProcessingImpl::ProcessReverseStreamLocked() {
ra->SplitIntoFrequencyBands();
}
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
public_submodules_->intelligibility_enhancer->ProcessRenderAudio(
ra->split_channels_f(kBand0To8kHz), capture_nonlocked_.split_rate,
ra->num_channels());
@@ -1146,16 +1157,16 @@ bool AudioProcessingImpl::fwd_analysis_needed() const {
}
bool AudioProcessingImpl::is_rev_processed() const {
- return constants_.intelligibility_enabled;
+ return capture_nonlocked_.intelligibility_enabled;
}
bool AudioProcessingImpl::rev_synthesis_needed() const {
return (is_rev_processed() &&
- formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz);
+ is_multi_band(formats_.rev_proc_format.sample_rate_hz()));
}
bool AudioProcessingImpl::rev_analysis_needed() const {
- return formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz &&
+ return is_multi_band(formats_.rev_proc_format.sample_rate_hz()) &&
(is_rev_processed() ||
public_submodules_->echo_cancellation
->is_enabled_render_side_query() ||
@@ -1211,7 +1222,7 @@ void AudioProcessingImpl::InitializeBeamformer() {
}
void AudioProcessingImpl::InitializeIntelligibility() {
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
public_submodules_->intelligibility_enhancer.reset(
new IntelligibilityEnhancer(capture_nonlocked_.split_rate,
render_.render_audio->num_channels(),
@@ -1392,8 +1403,10 @@ int AudioProcessingImpl::WriteInitMessage() {
formats_.api_format.reverse_input_stream().sample_rate_hz());
msg->set_output_sample_rate(
formats_.api_format.output_stream().sample_rate_hz());
- // TODO(ekmeyerson): Add reverse output fields to
- // debug_dump_.capture.event_msg.
+ msg->set_reverse_output_sample_rate(
+ formats_.api_format.reverse_output_stream().sample_rate_hz());
+ msg->set_num_reverse_output_channels(
+ formats_.api_format.reverse_output_stream().num_channels());
RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
&debug_dump_.num_bytes_left_for_log_,
@@ -1436,6 +1449,8 @@ int AudioProcessingImpl::WriteConfigMessage(bool forced) {
config.set_transient_suppression_enabled(
capture_.transient_suppressor_enabled);
+ config.set_intelligibility_enhancer_enabled(
+ capture_nonlocked_.intelligibility_enabled);
std::string experiments_description =
public_submodules_->echo_cancellation->GetExperimentsDescription();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
index 7323b85c78e..04ddabd1c7f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
@@ -276,16 +276,12 @@ class AudioProcessingImpl : public AudioProcessing {
// APM constants.
const struct ApmConstants {
- ApmConstants(int agc_startup_min_volume,
- bool use_experimental_agc,
- bool intelligibility_enabled)
+ ApmConstants(int agc_startup_min_volume, bool use_experimental_agc)
: // Format of processing streams at input/output call sites.
agc_startup_min_volume(agc_startup_min_volume),
- use_experimental_agc(use_experimental_agc),
- intelligibility_enabled(intelligibility_enabled) {}
+ use_experimental_agc(use_experimental_agc) {}
int agc_startup_min_volume;
bool use_experimental_agc;
- bool intelligibility_enabled;
} constants_;
struct ApmCaptureState {
@@ -325,11 +321,13 @@ class AudioProcessingImpl : public AudioProcessing {
} capture_ GUARDED_BY(crit_capture_);
struct ApmCaptureNonLockedState {
- ApmCaptureNonLockedState(bool beamformer_enabled)
+ ApmCaptureNonLockedState(bool beamformer_enabled,
+ bool intelligibility_enabled)
: fwd_proc_format(kSampleRate16kHz),
split_rate(kSampleRate16kHz),
stream_delay_ms(0),
- beamformer_enabled(beamformer_enabled) {}
+ beamformer_enabled(beamformer_enabled),
+ intelligibility_enabled(intelligibility_enabled) {}
// Only the rate and samples fields of fwd_proc_format_ are used because the
// forward processing number of channels is mutable and is tracked by the
// capture_audio_.
@@ -337,6 +335,7 @@ class AudioProcessingImpl : public AudioProcessing {
int split_rate;
int stream_delay_ms;
bool beamformer_enabled;
+ bool intelligibility_enabled;
} capture_nonlocked_;
struct ApmRenderState {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
index 948c5efd93f..ded75c86520 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
@@ -54,7 +54,12 @@ bool write_ref_data = false;
const google::protobuf::int32 kChannels[] = {1, 2};
const int kSampleRates[] = {8000, 16000, 32000, 48000};
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+// Android doesn't support 48kHz.
+const int kProcessSampleRates[] = {8000, 16000, 32000};
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
const int kProcessSampleRates[] = {8000, 16000, 32000, 48000};
+#endif
enum StreamDirection { kForward = 0, kReverse };
@@ -2692,7 +2697,7 @@ INSTANTIATE_TEST_CASE_P(
std::tr1::make_tuple(16000, 32000, 32000, 32000, 25, 0),
std::tr1::make_tuple(16000, 32000, 16000, 32000, 25, 20),
std::tr1::make_tuple(16000, 16000, 48000, 16000, 40, 20),
- std::tr1::make_tuple(16000, 16000, 32000, 16000, 50, 20),
+ std::tr1::make_tuple(16000, 16000, 32000, 16000, 40, 20),
std::tr1::make_tuple(16000, 16000, 16000, 16000, 0, 0)));
#elif defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
@@ -2748,7 +2753,7 @@ INSTANTIATE_TEST_CASE_P(
std::tr1::make_tuple(16000, 32000, 32000, 32000, 25, 0),
std::tr1::make_tuple(16000, 32000, 16000, 32000, 25, 20),
std::tr1::make_tuple(16000, 16000, 48000, 16000, 35, 20),
- std::tr1::make_tuple(16000, 16000, 32000, 16000, 40, 20),
+ std::tr1::make_tuple(16000, 16000, 32000, 16000, 35, 20),
std::tr1::make_tuple(16000, 16000, 16000, 16000, 0, 0)));
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/debug.proto b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
index 1c025fbc72c..44177735e8c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/debug.proto
+++ b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
@@ -12,6 +12,8 @@ message Init {
optional int32 num_reverse_channels = 5;
optional int32 reverse_sample_rate = 6;
optional int32 output_sample_rate = 7;
+ optional int32 reverse_output_sample_rate = 8;
+ optional int32 num_reverse_output_channels = 9;
}
// May contain interleaved or deinterleaved data, but don't store both formats.
@@ -44,7 +46,7 @@ message Stream {
// Contains the configurations of various APM component. A Config message is
// added when any of the fields are changed.
message Config {
- // Next field number 18.
+ // Next field number 19.
// Acoustic echo canceler.
optional bool aec_enabled = 1;
optional bool aec_delay_agnostic_enabled = 2;
@@ -70,6 +72,8 @@ message Config {
// Semicolon-separated string containing experimental feature
// descriptions.
optional string experiments_description = 17;
+ // Intelligibility Enhancer
+ optional bool intelligibility_enhancer_enabled = 18;
}
message Event {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
index 97f69974dba..d7b27e99f3c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/include/logging.h"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
index 3a6fcb116d5..2461f72ad35 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/gain_control_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/optional.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/agc/legacy/gain_control.h"
@@ -274,6 +275,11 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio,
return AudioProcessing::kNoError;
}
+int GainControlImpl::compression_gain_db() const {
+ rtc::CritScope cs(crit_capture_);
+ return compression_gain_db_;
+}
+
// TODO(ajm): ensure this is called under kAdaptiveAnalog.
int GainControlImpl::set_stream_analog_level(int level) {
rtc::CritScope cs(crit_capture_);
@@ -413,11 +419,6 @@ int GainControlImpl::set_compression_gain_db(int gain) {
return Configure();
}
-int GainControlImpl::compression_gain_db() const {
- rtc::CritScope cs(crit_capture_);
- return compression_gain_db_;
-}
-
int GainControlImpl::enable_limiter(bool enable) {
{
rtc::CritScope cs(crit_capture_);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
index 9498ac60b54..2459ce3b4b7 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
@@ -51,6 +51,8 @@ class GainControlImpl : public GainControl {
// Reads render side data that has been queued on the render call.
void ReadQueuedRenderData();
+ int compression_gain_db() const override;
+
private:
class GainController;
@@ -61,7 +63,6 @@ class GainControlImpl : public GainControl {
int set_target_level_dbfs(int level) override;
int target_level_dbfs() const override;
int set_compression_gain_db(int gain) override;
- int compression_gain_db() const override;
int enable_limiter(bool enable) override;
int set_analog_level_limits(int minimum, int maximum) override;
int analog_level_minimum() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
index 0c93a984b24..2f8e48f82da 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
@@ -160,9 +160,7 @@ struct Beamforming {
const SphericalPointf target_direction;
};
-// Use to enable intelligibility enhancer in audio processing. Must be provided
-// though the constructor. It will have no impact if used with
-// AudioProcessing::SetExtraOptions().
+// Use to enable intelligibility enhancer in audio processing.
//
// Note: If enabled and the reverse stream has more than one output channel,
// the reverse stream will become an upmixed mono signal.
@@ -685,7 +683,7 @@ class EchoCancellation {
// (Pre non-linear processing suppression) A_NLP = 10log_10(P_echo / P_a)
AudioProcessing::Statistic a_nlp;
- // Fraction of time that the AEC linear filter is divergent, in a 0.5-second
+ // Fraction of time that the AEC linear filter is divergent, in a 1-second
// non-overlapped aggregation window.
float divergent_filter_fraction;
};
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
index de36b7a8bc7..ae7f9119213 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
@@ -38,6 +38,8 @@ const float kDecayRate = 0.994f; // Power estimation decay rate.
const float kMaxRelativeGainChange = 0.006f;
const float kRho = 0.0004f; // Default production and interpretation SNR.
const float kPowerNormalizationFactor = 1.f / (1 << 30);
+const float kMaxActiveSNR = 128.f; // 21dB
+const float kMinInactiveSNR = 32.f; // 15dB
// Returns dot product of vectors |a| and |b| with size |length|.
float DotProduct(const float* a, const float* b, size_t length) {
@@ -84,6 +86,8 @@ IntelligibilityEnhancer::IntelligibilityEnhancer(int sample_rate_hz,
audio_s16_(chunk_length_),
chunks_since_voice_(kSpeechOffsetDelay),
is_speech_(false),
+ snr_(kMaxActiveSNR),
+ is_active_(false),
noise_estimation_buffer_(num_noise_bins),
noise_estimation_queue_(kMaxNumNoiseEstimatesToBuffer,
std::vector<float>(num_noise_bins),
@@ -105,8 +109,12 @@ IntelligibilityEnhancer::IntelligibilityEnhancer(int sample_rate_hz,
}
void IntelligibilityEnhancer::SetCaptureNoiseEstimate(
- std::vector<float> noise) {
+ std::vector<float> noise, int gain_db) {
RTC_DCHECK_EQ(noise.size(), num_noise_bins_);
+ const float gain = std::pow(10.f, gain_db / 20.f);
+ for (auto& bin : noise) {
+ bin *= gain;
+ }
// Disregarding return value since buffer overflow is acceptable, because it
// is not critical to get each noise estimate.
if (noise_estimation_queue_.Insert(&noise)) {
@@ -135,29 +143,55 @@ void IntelligibilityEnhancer::ProcessAudioBlock(
if (is_speech_) {
clear_power_estimator_.Step(in_block[0]);
}
- const std::vector<float>& clear_power = clear_power_estimator_.power();
- const std::vector<float>& noise_power = noise_power_estimator_.power();
- MapToErbBands(clear_power.data(), render_filter_bank_,
- filtered_clear_pow_.data());
- MapToErbBands(noise_power.data(), capture_filter_bank_,
- filtered_noise_pow_.data());
- SolveForGainsGivenLambda(kLambdaTop, start_freq_, gains_eq_.data());
- const float power_target = std::accumulate(
- filtered_clear_pow_.data(), filtered_clear_pow_.data() + bank_size_, 0.f);
- const float power_top =
- DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
- SolveForGainsGivenLambda(kLambdaBot, start_freq_, gains_eq_.data());
- const float power_bot =
- DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
- if (power_target >= power_bot && power_target <= power_top) {
- SolveForLambda(power_target);
- UpdateErbGains();
- } // Else experiencing power underflow, so do nothing.
+ SnrBasedEffectActivation();
+ if (is_active_) {
+ MapToErbBands(clear_power_estimator_.power().data(), render_filter_bank_,
+ filtered_clear_pow_.data());
+ MapToErbBands(noise_power_estimator_.power().data(), capture_filter_bank_,
+ filtered_noise_pow_.data());
+ SolveForGainsGivenLambda(kLambdaTop, start_freq_, gains_eq_.data());
+ const float power_target = std::accumulate(
+ filtered_clear_pow_.data(),
+ filtered_clear_pow_.data() + bank_size_,
+ 0.f);
+ const float power_top =
+ DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
+ SolveForGainsGivenLambda(kLambdaBot, start_freq_, gains_eq_.data());
+ const float power_bot =
+ DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
+ if (power_target >= power_bot && power_target <= power_top) {
+ SolveForLambda(power_target);
+ UpdateErbGains();
+ } // Else experiencing power underflow, so do nothing.
+ }
for (size_t i = 0; i < in_channels; ++i) {
gain_applier_.Apply(in_block[i], out_block[i]);
}
}
+void IntelligibilityEnhancer::SnrBasedEffectActivation() {
+ const float* clear_psd = clear_power_estimator_.power().data();
+ const float* noise_psd = noise_power_estimator_.power().data();
+ const float clear_power =
+ std::accumulate(clear_psd, clear_psd + freqs_, 0.f);
+ const float noise_power =
+ std::accumulate(noise_psd, noise_psd + freqs_, 0.f);
+ snr_ = kDecayRate * snr_ + (1.f - kDecayRate) * clear_power /
+ (noise_power + std::numeric_limits<float>::epsilon());
+ if (is_active_) {
+ if (snr_ > kMaxActiveSNR) {
+ is_active_ = false;
+ // Set the target gains to unity.
+ float* gains = gain_applier_.target();
+ for (size_t i = 0; i < freqs_; ++i) {
+ gains[i] = 1.f;
+ }
+ }
+ } else {
+ is_active_ = snr_ < kMinInactiveSNR;
+ }
+}
+
void IntelligibilityEnhancer::SolveForLambda(float power_target) {
const float kConvergeThresh = 0.001f; // TODO(ekmeyerson): Find best values
const int kMaxIters = 100; // for these, based on experiments.
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
index 14132129349..63ae80e2c44 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
@@ -36,7 +36,7 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
size_t num_noise_bins);
// Sets the capture noise magnitude spectrum estimate.
- void SetCaptureNoiseEstimate(std::vector<float> noise);
+ void SetCaptureNoiseEstimate(std::vector<float> noise, int gain_db);
// Reads chunk of speech in time domain and updates with modified signal.
void ProcessRenderAudio(float* const* audio,
@@ -56,6 +56,12 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
private:
FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest, TestErbCreation);
FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest, TestSolveForGains);
+ FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest,
+ TestNoiseGainHasExpectedResult);
+
+ // Updates the SNR estimation and enables or disables this component using a
+ // hysteresis.
+ void SnrBasedEffectActivation();
// Bisection search for optimal |lambda|.
void SolveForLambda(float power_target);
@@ -103,6 +109,8 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
std::vector<int16_t> audio_s16_;
size_t chunks_since_voice_;
bool is_speech_;
+ float snr_;
+ bool is_active_;
std::vector<float> noise_estimation_buffer_;
SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
index 080e228cb8e..30035ab16ed 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
@@ -237,7 +237,7 @@ void ProcessOneFrame(int sample_rate_hz,
noise_suppressor->ProcessCaptureAudio(capture_audio_buffer);
intelligibility_enhancer->SetCaptureNoiseEstimate(
- noise_suppressor->NoiseEstimate());
+ noise_suppressor->NoiseEstimate(), 0);
if (sample_rate_hz > AudioProcessing::kSampleRate16kHz) {
render_audio_buffer->MergeFrequencyBands();
@@ -311,12 +311,17 @@ void RunBitexactnessTest(int sample_rate_hz,
output_reference, render_output, kElementErrorBound));
}
+float float_rand() {
+ return std::rand() * 2.f / RAND_MAX - 1;
+}
+
} // namespace
class IntelligibilityEnhancerTest : public ::testing::Test {
protected:
IntelligibilityEnhancerTest()
: clear_data_(kSamples), noise_data_(kSamples), orig_data_(kSamples) {
+ std::srand(1);
enh_.reset(
new IntelligibilityEnhancer(kSampleRate, kNumChannels, kNumNoiseBins));
}
@@ -352,8 +357,6 @@ TEST_F(IntelligibilityEnhancerTest, TestRenderUpdate) {
std::fill(orig_data_.begin(), orig_data_.end(), 0.f);
std::fill(clear_data_.begin(), clear_data_.end(), 0.f);
EXPECT_FALSE(CheckUpdate());
- std::srand(1);
- auto float_rand = []() { return std::rand() * 2.f / RAND_MAX - 1; };
std::generate(noise_data_.begin(), noise_data_.end(), float_rand);
EXPECT_FALSE(CheckUpdate());
std::generate(clear_data_.begin(), clear_data_.end(), float_rand);
@@ -403,6 +406,29 @@ TEST_F(IntelligibilityEnhancerTest, TestSolveForGains) {
}
}
+TEST_F(IntelligibilityEnhancerTest, TestNoiseGainHasExpectedResult) {
+ const int kGainDB = 6;
+ const float kGainFactor = std::pow(10.f, kGainDB / 20.f);
+ const float kTolerance = 0.003f;
+ std::vector<float> noise(kNumNoiseBins);
+ std::vector<float> noise_psd(kNumNoiseBins);
+ std::generate(noise.begin(), noise.end(), float_rand);
+ for (size_t i = 0; i < kNumNoiseBins; ++i) {
+ noise_psd[i] = kGainFactor * kGainFactor * noise[i] * noise[i];
+ }
+ float* clear_cursor = clear_data_.data();
+ for (size_t i = 0; i < kNumFramesToProcess; ++i) {
+ enh_->SetCaptureNoiseEstimate(noise, kGainDB);
+ enh_->ProcessRenderAudio(&clear_cursor, kSampleRate, kNumChannels);
+ }
+ const std::vector<float>& estimated_psd =
+ enh_->noise_power_estimator_.power();
+ for (size_t i = 0; i < kNumNoiseBins; ++i) {
+ EXPECT_LT(std::abs(estimated_psd[i] - noise_psd[i]) / noise_psd[i],
+ kTolerance);
+ }
+}
+
TEST(IntelligibilityEnhancerBitExactnessTest, DISABLED_Mono8kHz) {
const float kOutputReference[] = {-0.001892f, -0.003296f, -0.001953f};
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
index 3a9433b4768..3675f66cafe 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
@@ -22,16 +22,15 @@ namespace intelligibility {
namespace {
+const float kMinFactor = 0.01f;
+const float kMaxFactor = 1000.f;
+
// Return |current| changed towards |target|, with the relative change being at
// most |limit|.
float UpdateFactor(float target, float current, float limit) {
float gain = target / (current + std::numeric_limits<float>::epsilon());
- if (gain < 1.f - limit) {
- gain = 1.f - limit;
- } else if (gain > 1.f + limit) {
- gain = 1.f + limit;
- }
- return current * gain + std::numeric_limits<float>::epsilon();
+ gain = std::min(std::max(gain, 1.f - limit), 1.f + limit);
+  return std::min(std::max(current * gain, kMinFactor), kMaxFactor);
}
} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
index 64ccfd96efc..abd10d85165 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
@@ -64,7 +64,7 @@ void void_main(int argc, char* argv[]) {
capture_audio.CopyFrom(noise_buf.channels(), stream_config);
ns.AnalyzeCaptureAudio(&capture_audio);
ns.ProcessCaptureAudio(&capture_audio);
- enh.SetCaptureNoiseEstimate(ns.NoiseEstimate());
+ enh.SetCaptureNoiseEstimate(ns.NoiseEstimate(), 0);
enh.ProcessRenderAudio(in_buf.channels(), in_file.sample_rate(),
in_file.num_channels());
Interleave(in_buf.channels(), in_buf.num_frames(), in_buf.num_channels(),
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h
deleted file mode 100644
index b062913be28..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
-
-#include <stdio.h>
-
-#include "webrtc/modules/audio_processing/logging/aec_logging_file_handling.h"
-
-// To enable AEC logging, invoke GYP with -Daec_debug_dump=1.
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-// Dumps a wav data to file.
-#define RTC_AEC_DEBUG_WAV_WRITE(file, data, num_samples) \
- do { \
- rtc_WavWriteSamples(file, data, num_samples); \
- } while (0)
-
-// (Re)opens a wav file for writing using the specified sample rate.
-#define RTC_AEC_DEBUG_WAV_REOPEN(name, instance_index, process_rate, \
- sample_rate, wav_file) \
- do { \
- WebRtcAec_ReopenWav(name, instance_index, process_rate, sample_rate, \
- wav_file); \
- } while (0)
-
-// Closes a wav file.
-#define RTC_AEC_DEBUG_WAV_CLOSE(wav_file) \
- do { \
- rtc_WavClose(wav_file); \
- } while (0)
-
-// Dumps a raw data to file.
-#define RTC_AEC_DEBUG_RAW_WRITE(file, data, data_size) \
- do { \
- (void) fwrite(data, data_size, 1, file); \
- } while (0)
-
-// Dumps a raw scalar int32 to file.
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
- do { \
- int32_t value_to_store = data; \
- (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
- } while (0)
-
-// Dumps a raw scalar double to file.
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
- do { \
- double value_to_store = data; \
- (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
- } while (0)
-
-// Opens a raw data file for writing using the specified sample rate.
-#define RTC_AEC_DEBUG_RAW_OPEN(name, instance_counter, file) \
- do { \
- WebRtcAec_RawFileOpen(name, instance_counter, file); \
- } while (0)
-
-// Closes a raw data file.
-#define RTC_AEC_DEBUG_RAW_CLOSE(file) \
- do { \
- fclose(file); \
- } while (0)
-
-#else // RTC_AEC_DEBUG_DUMP
-#define RTC_AEC_DEBUG_WAV_WRITE(file, data, num_samples) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_WAV_REOPEN(wav_file, name, instance_index, process_rate, \
- sample_rate) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_WAV_CLOSE(wav_file) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE(file, data, data_size) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_OPEN(file, name, instance_counter) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_CLOSE(file) \
- do { \
- } while (0)
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc
deleted file mode 100644
index 3a434714e1e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_processing/logging/aec_logging_file_handling.h"
-
-#include <stdint.h>
-#include <stdio.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/stringutils.h"
-#include "webrtc/common_audio/wav_file.h"
-#include "webrtc/typedefs.h"
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-void WebRtcAec_ReopenWav(const char* name,
- int instance_index,
- int process_rate,
- int sample_rate,
- rtc_WavWriter** wav_file) {
- if (*wav_file) {
- if (rtc_WavSampleRate(*wav_file) == sample_rate)
- return;
- rtc_WavClose(*wav_file);
- }
- char filename[64];
- int written = rtc::sprintfn(filename, sizeof(filename), "%s%d-%d.wav", name,
- instance_index, process_rate);
-
- // Ensure there was no buffer output error.
- RTC_DCHECK_GE(written, 0);
- // Ensure that the buffer size was sufficient.
- RTC_DCHECK_LT(static_cast<size_t>(written), sizeof(filename));
-
- *wav_file = rtc_WavOpen(filename, sample_rate, 1);
-}
-
-void WebRtcAec_RawFileOpen(const char* name, int instance_index, FILE** file) {
- char filename[64];
- int written = rtc::sprintfn(filename, sizeof(filename), "%s_%d.dat", name,
- instance_index);
-
- // Ensure there was no buffer output error.
- RTC_DCHECK_GE(written, 0);
- // Ensure that the buffer size was sufficient.
- RTC_DCHECK_LT(static_cast<size_t>(written), sizeof(filename));
-
- *file = fopen(filename, "wb");
-}
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h
deleted file mode 100644
index 5ec83948726..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
-
-#include <stdio.h>
-
-#include "webrtc/common_audio/wav_file.h"
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-// Opens a new Wav file for writing. If it was already open with a different
-// sample frequency, it closes it first.
-void WebRtcAec_ReopenWav(const char* name,
- int instance_index,
- int process_rate,
- int sample_rate,
- rtc_WavWriter** wav_file);
-
-// Opens dumpfile with instance-specific filename.
-void WebRtcAec_RawFileOpen(const char* name, int instance_index, FILE** file);
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
new file mode 100644
index 00000000000..491196e0972
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
+
+#include <sstream>
+
+#include "webrtc/base/stringutils.h"
+
+// Check to verify that the define is properly set.
+#if !defined(WEBRTC_AEC_DEBUG_DUMP) || \
+ (WEBRTC_AEC_DEBUG_DUMP != 0 && WEBRTC_AEC_DEBUG_DUMP != 1)
+#error "Set WEBRTC_AEC_DEBUG_DUMP to either 0 or 1"
+#endif
+
+namespace webrtc {
+
+namespace {
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+std::string FormFileName(const char* name,
+ int instance_index,
+ int reinit_index,
+ const std::string& suffix) {
+ std::stringstream ss;
+ ss << name << "_" << instance_index << "-" << reinit_index << suffix;
+ return ss.str();
+}
+#endif
+
+} // namespace
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+FILE* ApmDataDumper::GetRawFile(const char* name) {
+ std::string filename =
+ FormFileName(name, instance_index_, recording_set_index_, ".dat");
+ auto& f = raw_files_[filename];
+ if (!f) {
+ f.reset(fopen(filename.c_str(), "wb"));
+ }
+ return f.get();
+}
+
+WavWriter* ApmDataDumper::GetWavFile(const char* name,
+ int sample_rate_hz,
+ int num_channels) {
+ std::string filename =
+ FormFileName(name, instance_index_, recording_set_index_, ".wav");
+ auto& f = wav_files_[filename];
+ if (!f) {
+ f.reset(new WavWriter(filename.c_str(), sample_rate_hz, num_channels));
+ }
+ return f.get();
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h
new file mode 100644
index 00000000000..93232b7c9dc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
+
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_audio/wav_file.h"
+
+// Check to verify that the define is properly set.
+#if !defined(WEBRTC_AEC_DEBUG_DUMP) || \
+ (WEBRTC_AEC_DEBUG_DUMP != 0 && WEBRTC_AEC_DEBUG_DUMP != 1)
+#error "Set WEBRTC_AEC_DEBUG_DUMP to either 0 or 1"
+#endif
+
+namespace webrtc {
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+// Functor used to use as a custom deleter in the map of file pointers to raw
+// files.
+struct RawFileCloseFunctor {
+ void operator()(FILE* f) const { fclose(f); }
+};
+#endif
+
+// Class that handles dumping of variables into files.
+class ApmDataDumper {
+ public:
+// Constructor that takes an instance index that may
+// be used to distinguish data dumped from different
+// instances of the code.
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ explicit ApmDataDumper(int instance_index)
+ : instance_index_(instance_index) {}
+#else
+ explicit ApmDataDumper(int instance_index) {}
+#endif
+
+ // Reinitializes the data dumping such that new versions
+ // of all files being dumped to are created.
+ void InitiateNewSetOfRecordings() {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ ++recording_set_index_;
+#endif
+ }
+
+ // Methods for performing dumping of data of various types into
+ // various formats.
+ void DumpRaw(const char* name, int v_length, const float* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const float> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpRaw(const char* name, int v_length, const int16_t* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const int16_t> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpRaw(const char* name, int v_length, const int32_t* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const int32_t> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpWav(const char* name,
+ int v_length,
+ const float* v,
+ int sample_rate_hz,
+ int num_channels) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ WavWriter* file = GetWavFile(name, sample_rate_hz, num_channels);
+ file->WriteSamples(v, v_length);
+#endif
+ }
+
+ private:
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ const int instance_index_;
+ int recording_set_index_ = 0;
+ std::unordered_map<std::string, std::unique_ptr<FILE, RawFileCloseFunctor>>
+ raw_files_;
+ std::unordered_map<std::string, std::unique_ptr<WavWriter>> wav_files_;
+
+ FILE* GetRawFile(const char* name);
+ WavWriter* GetWavFile(const char* name, int sample_rate_hz, int num_channels);
+#endif
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(ApmDataDumper);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
index 4344c56fcc6..e1c9fdcd019 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#if defined(WEBRTC_NS_FLOAT)
#include "webrtc/modules/audio_processing/ns/noise_suppression.h"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
index 25f16d26abc..94b6449776f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -19,7 +19,7 @@
#include "webrtc/modules/audio_processing/ns/nsx_core.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
/* Tables are defined in ARM assembly files. */
extern const int16_t WebRtcNsx_kLogTable[9];
extern const int16_t WebRtcNsx_kCounterDiv[201];
@@ -65,7 +65,7 @@ static const int16_t WebRtcNsx_kLogTableFrac[256] = {
237, 238, 238, 239, 240, 241, 241, 242, 243, 244, 244, 245, 246, 247, 247,
248, 249, 249, 250, 251, 252, 252, 253, 254, 255, 255
};
-#endif // WEBRTC_DETECT_NEON || WEBRTC_HAS_NEON
+#endif // WEBRTC_HAS_NEON
// Skip first frequency bins during estimation. (0 <= value < 64)
static const size_t kStartBand = 5;
@@ -557,7 +557,7 @@ AnalysisUpdate WebRtcNsx_AnalysisUpdate;
Denormalize WebRtcNsx_Denormalize;
NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
// Initialize function pointers for ARM Neon platform.
static void WebRtcNsx_InitNeon(void) {
WebRtcNsx_NoiseEstimation = WebRtcNsx_NoiseEstimationNeon;
@@ -762,12 +762,7 @@ int32_t WebRtcNsx_InitCore(NoiseSuppressionFixedC* inst, uint32_t fs) {
WebRtcNsx_Denormalize = DenormalizeC;
WebRtcNsx_NormalizeRealBuffer = NormalizeRealBufferC;
-#ifdef WEBRTC_DETECT_NEON
- uint64_t features = WebRtc_GetCPUFeaturesARM();
- if ((features & kCPUFeatureNEON) != 0) {
- WebRtcNsx_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcNsx_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
index f463dbbe1a2..d1754f31eae 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
@@ -215,7 +215,7 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
uint32_t* priorLocSnr,
uint32_t* postLocSnr);
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for ARM Neon platforms
// are declared below and defined in file nsx_core_neon.c.
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
index da7aa3d5dbe..213320d38c8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -96,8 +96,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
}
tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
// compute indicator function: sigmoid map
- tableIndex = (int16_t)(tmp32no1 >> 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
+ if (tmp32no1 < (16 << 14) && tmp32no1 >= 0) {
+ tableIndex = (int16_t)(tmp32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
@@ -128,8 +128,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
// FLOAT code
// indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
// (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
@@ -175,8 +175,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
/* FLOAT code
indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
*/
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
index 7688d82d78d..3922308c7c8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
@@ -131,8 +131,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
}
tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
// compute indicator function: sigmoid map
- tableIndex = (int16_t)(tmp32no1 >> 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
+ if (tmp32no1 < (16 << 14) && tmp32no1 >= 0) {
+ tableIndex = (int16_t)(tmp32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
@@ -163,8 +163,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
// FLOAT code
// indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
// (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
@@ -210,8 +210,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
/* FLOAT code
indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
*/
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h b/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
index f7cde598210..76d5e0edb8f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
@@ -16,11 +16,11 @@
#include <memory>
#include <vector>
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
@@ -33,9 +33,9 @@ namespace webrtc {
// Holds a few statistics about a series of TickIntervals.
struct TickIntervalStats {
TickIntervalStats() : min(std::numeric_limits<int64_t>::max()) {}
- TickInterval sum;
- TickInterval max;
- TickInterval min;
+ int64_t sum;
+ int64_t max;
+ int64_t min;
};
// Interface for processing an input file with an AudioProcessing instance and
@@ -60,10 +60,10 @@ class AudioFileProcessor {
class ScopedTimer {
public:
explicit ScopedTimer(TickIntervalStats* proc_time)
- : proc_time_(proc_time), start_time_(TickTime::Now()) {}
+ : proc_time_(proc_time), start_time_(rtc::TimeNanos()) {}
~ScopedTimer() {
- TickInterval interval = TickTime::Now() - start_time_;
+ int64_t interval = rtc::TimeNanos() - start_time_;
proc_time_->sum += interval;
proc_time_->max = std::max(proc_time_->max, interval);
proc_time_->min = std::min(proc_time_->min, interval);
@@ -71,7 +71,7 @@ class AudioFileProcessor {
private:
TickIntervalStats* const proc_time_;
- TickTime start_time_;
+ int64_t start_time_;
};
TickIntervalStats* mutable_proc_time() { return &proc_time_; }
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
index 41e45bfdc69..33790d837f4 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
@@ -25,7 +25,6 @@
#include "webrtc/modules/audio_processing/test/audio_file_processor.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
namespace {
@@ -167,13 +166,14 @@ int main(int argc, char* argv[]) {
if (FLAGS_perf) {
const auto& proc_time = processor->proc_time();
- int64_t exec_time_us = proc_time.sum.Microseconds();
+ int64_t exec_time_us = proc_time.sum / rtc::kNumNanosecsPerMicrosec;
printf(
"\nExecution time: %.3f s, File time: %.2f s\n"
"Time per chunk (mean, max, min):\n%.0f us, %.0f us, %.0f us\n",
exec_time_us * 1e-6, num_chunks * 1.f / kChunksPerSecond,
- exec_time_us * 1.f / num_chunks, 1.f * proc_time.max.Microseconds(),
- 1.f * proc_time.min.Microseconds());
+ exec_time_us * 1.f / num_chunks,
+ 1.f * proc_time.max / rtc::kNumNanosecsPerMicrosec,
+ 1.f * proc_time.min / rtc::kNumNanosecsPerMicrosec);
}
return 0;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
index fc127e610ed..fa76747c2be 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
@@ -187,6 +187,10 @@ void DebugDumpReplayer::MaybeRecreateApm(const audioproc::Config& msg) {
config.Set<ExtendedFilter>(
new ExtendedFilter(msg.aec_extended_filter_enabled()));
+ RTC_CHECK(msg.has_intelligibility_enhancer_enabled());
+ config.Set<Intelligibility>(
+ new Intelligibility(msg.intelligibility_enhancer_enabled()));
+
// We only create APM once, since changes on these fields should not
// happen in current implementation.
if (!apm_.get()) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
index 3acb69444d9..64d659ea500 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
@@ -10,6 +10,7 @@
#include <stddef.h> // size_t
+#include <memory>
#include <string>
#include <vector>
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc b/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
index 185bc142d50..527e0a1e3e5 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
@@ -19,13 +19,13 @@
#include <memory>
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/perf_test.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
@@ -562,7 +562,7 @@ void void_main(int argc, char* argv[]) {
int reverse_count = 0;
int primary_count = 0;
int near_read_bytes = 0;
- TickInterval acc_ticks;
+ int64_t acc_nanos = 0;
AudioFrame far_frame;
AudioFrame near_frame;
@@ -573,8 +573,8 @@ void void_main(int argc, char* argv[]) {
int8_t stream_has_voice = 0;
float ns_speech_prob = 0.0f;
- TickTime t0 = TickTime::Now();
- TickTime t1 = t0;
+ int64_t t0 = rtc::TimeNanos();
+ int64_t t1 = t0;
int64_t max_time_us = 0;
int64_t max_time_reverse_us = 0;
int64_t min_time_us = 1e6;
@@ -676,7 +676,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
if (msg.has_data()) {
@@ -692,14 +692,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_reverse_us) {
- max_time_reverse_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_reverse_us) {
+ max_time_reverse_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_reverse_us) {
- min_time_reverse_us = tick_diff.Microseconds();
+ if (diff_us < min_time_reverse_us) {
+ min_time_reverse_us = diff_us;
}
}
@@ -737,7 +738,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
ASSERT_EQ(apm->kNoError,
@@ -795,14 +796,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_us) {
- max_time_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_us) {
+ max_time_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_us) {
- min_time_us = tick_diff.Microseconds();
+ if (diff_us < min_time_us) {
+ min_time_us = diff_us;
}
}
@@ -925,21 +927,22 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
ASSERT_EQ(apm->kNoError,
apm->ProcessReverseStream(&far_frame));
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_reverse_us) {
- max_time_reverse_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_reverse_us) {
+ max_time_reverse_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_reverse_us) {
- min_time_reverse_us = tick_diff.Microseconds();
+ if (diff_us < min_time_reverse_us) {
+ min_time_reverse_us = diff_us;
}
}
@@ -982,7 +985,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
const int capture_level_in = capture_level;
@@ -1030,14 +1033,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_us) {
- max_time_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_us) {
+ max_time_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_us) {
- min_time_us = tick_diff.Microseconds();
+ if (diff_us < min_time_us) {
+ min_time_us = diff_us;
}
}
@@ -1130,7 +1134,7 @@ void void_main(int argc, char* argv[]) {
if (perf_testing) {
if (primary_count > 0) {
- int64_t exec_time = acc_ticks.Milliseconds();
+ int64_t exec_time = acc_nanos / rtc::kNumNanosecsPerMillisec;
printf("\nTotal time: %.3f s, file time: %.2f s\n",
exec_time * 0.001, primary_count * 0.01);
printf("Time per frame: %.3f ms (average), %.3f ms (max),"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc b/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
index fbb8e85fee7..f5c0700b3f8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
@@ -252,6 +252,7 @@ int do_main(int argc, char* argv[]) {
PRINT_CONFIG(ns_enabled);
PRINT_CONFIG(ns_level);
PRINT_CONFIG(transient_suppression_enabled);
+ PRINT_CONFIG(intelligibility_enhancer_enabled);
if (msg.has_experiments_description()) {
fprintf(settings_file, " experiments_description: %s\n",
msg.experiments_description().c_str());
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc
index 02df75a1010..56bdde890c9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc
@@ -13,6 +13,7 @@
#include <assert.h>
#include <stdlib.h>
#include <string.h>
+#include <algorithm>
// Number of right shifts for scaling is linearly depending on number of bits in
// the far-end binary spectrum.
@@ -276,7 +277,8 @@ BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
if (history_size > 1) {
// Sanity conditions fulfilled.
- self = malloc(sizeof(BinaryDelayEstimatorFarend));
+ self = static_cast<BinaryDelayEstimatorFarend*>(
+ malloc(sizeof(BinaryDelayEstimatorFarend)));
}
if (self == NULL) {
return NULL;
@@ -296,11 +298,12 @@ int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self,
int history_size) {
assert(self != NULL);
// (Re-)Allocate memory for history buffers.
- self->binary_far_history =
+ self->binary_far_history = static_cast<uint32_t*>(
realloc(self->binary_far_history,
- history_size * sizeof(*self->binary_far_history));
- self->far_bit_counts = realloc(self->far_bit_counts,
- history_size * sizeof(*self->far_bit_counts));
+ history_size * sizeof(*self->binary_far_history)));
+ self->far_bit_counts = static_cast<int*>(
+ realloc(self->far_bit_counts,
+ history_size * sizeof(*self->far_bit_counts)));
if ((self->binary_far_history == NULL) || (self->far_bit_counts == NULL)) {
history_size = 0;
}
@@ -404,7 +407,8 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
if ((farend != NULL) && (max_lookahead >= 0)) {
// Sanity conditions fulfilled.
- self = malloc(sizeof(BinaryDelayEstimator));
+ self = static_cast<BinaryDelayEstimator*>(
+ malloc(sizeof(BinaryDelayEstimator)));
}
if (self == NULL) {
return NULL;
@@ -422,8 +426,8 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
self->mean_bit_counts = NULL;
self->bit_counts = NULL;
self->histogram = NULL;
- self->binary_near_history =
- malloc((max_lookahead + 1) * sizeof(*self->binary_near_history));
+ self->binary_near_history = static_cast<uint32_t*>(
+ malloc((max_lookahead + 1) * sizeof(*self->binary_near_history)));
if (self->binary_near_history == NULL ||
WebRtc_AllocateHistoryBufferMemory(self, farend->history_size) == 0) {
WebRtc_FreeBinaryDelayEstimator(self);
@@ -444,13 +448,13 @@ int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self,
// The extra array element in |mean_bit_counts| and |histogram| is a dummy
// element only used while |last_delay| == -2, i.e., before we have a valid
// estimate.
- self->mean_bit_counts =
+ self->mean_bit_counts = static_cast<int32_t*>(
realloc(self->mean_bit_counts,
- (history_size + 1) * sizeof(*self->mean_bit_counts));
- self->bit_counts =
- realloc(self->bit_counts, history_size * sizeof(*self->bit_counts));
- self->histogram =
- realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram));
+ (history_size + 1) * sizeof(*self->mean_bit_counts)));
+ self->bit_counts = static_cast<int32_t*>(
+ realloc(self->bit_counts, history_size * sizeof(*self->bit_counts)));
+ self->histogram = static_cast<float*>(
+ realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram)));
if ((self->mean_bit_counts == NULL) ||
(self->bit_counts == NULL) ||
@@ -616,13 +620,10 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
(value_best_candidate < self->last_delay_probability)));
// Check for nonstationary farend signal.
- int non_stationary_farend = 0;
- for (i = 0; i < self->history_size; ++i) {
- if (self->farend->far_bit_counts[i] > 0) {
- non_stationary_farend = 1;
- break;
- }
- }
+ const bool non_stationary_farend =
+ std::any_of(self->farend->far_bit_counts,
+ self->farend->far_bit_counts + self->history_size,
+ [](int a) { return a > 0; });
if (non_stationary_farend) {
// Only update the validation statistics when the farend is nonstationary
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
index 4ebe0e61289..3e46763a6a2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -10,11 +10,9 @@
#include "testing/gtest/include/gtest/gtest.h"
-extern "C" {
#include "webrtc/modules/audio_processing/utility/delay_estimator.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_internal.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-}
#include "webrtc/typedefs.h"
namespace {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc
index b5448bc5bde..75c7abea776 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/audio_processing/utility/delay_estimator.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_internal.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
// Only bit |kBandFirst| through bit |kBandLast| are processed and
// |kBandFirst| - |kBandLast| must be < 32.
@@ -144,10 +143,11 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) {
// Check if the sub band used in the delay estimation is small enough to fit
// the binary spectra in a uint32_t.
- COMPILE_ASSERT(kBandLast - kBandFirst < 32);
+ static_assert(kBandLast - kBandFirst < 32, "");
if (spectrum_size >= kBandLast) {
- self = malloc(sizeof(DelayEstimatorFarend));
+ self = static_cast<DelayEstimatorFarend*>(
+ malloc(sizeof(DelayEstimatorFarend)));
}
if (self != NULL) {
@@ -158,7 +158,8 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) {
memory_fail |= (self->binary_farend == NULL);
// Allocate memory for spectrum buffers.
- self->mean_far_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+ self->mean_far_spectrum =
+ static_cast<SpectrumType*>(malloc(spectrum_size * sizeof(SpectrumType)));
memory_fail |= (self->mean_far_spectrum == NULL);
self->spectrum_size = spectrum_size;
@@ -275,7 +276,7 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
DelayEstimatorFarend* farend = (DelayEstimatorFarend*) farend_handle;
if (farend_handle != NULL) {
- self = malloc(sizeof(DelayEstimator));
+ self = static_cast<DelayEstimator*>(malloc(sizeof(DelayEstimator)));
}
if (self != NULL) {
@@ -287,8 +288,8 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
memory_fail |= (self->binary_handle == NULL);
// Allocate memory for spectrum buffers.
- self->mean_near_spectrum = malloc(farend->spectrum_size *
- sizeof(SpectrumType));
+ self->mean_near_spectrum = static_cast<SpectrumType*>(
+ malloc(farend->spectrum_size * sizeof(SpectrumType)));
memory_fail |= (self->mean_near_spectrum == NULL);
self->spectrum_size = farend->spectrum_size;
@@ -328,7 +329,7 @@ int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) {
}
int WebRtc_set_history_size(void* handle, int history_size) {
- DelayEstimator* self = handle;
+ DelayEstimator* self = static_cast<DelayEstimator*>(handle);
if ((self == NULL) || (history_size <= 1)) {
return -1;
@@ -337,7 +338,7 @@ int WebRtc_set_history_size(void* handle, int history_size) {
}
int WebRtc_history_size(const void* handle) {
- const DelayEstimator* self = handle;
+ const DelayEstimator* self = static_cast<const DelayEstimator*>(handle);
if (self == NULL) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
index 674a5197a87..5a0d37c2747 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index bf608789d5d..09652d84194 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -83,6 +83,10 @@ BitrateController* BitrateController::CreateBitrateController(
return new BitrateControllerImpl(clock, observer);
}
+BitrateController* BitrateController::CreateBitrateController(Clock* clock) {
+ return new BitrateControllerImpl(clock, nullptr);
+}
+
BitrateControllerImpl::BitrateControllerImpl(Clock* clock,
BitrateObserver* observer)
: clock_(clock),
@@ -94,8 +98,8 @@ BitrateControllerImpl::BitrateControllerImpl(Clock* clock,
last_fraction_loss_(0),
last_rtt_ms_(0),
last_reserved_bitrate_bps_(0) {
- // This calls the observer_, which means that the observer provided by the
- // user must be ready to accept a bitrate update when it constructs the
+ // This calls the observer_ if set, which means that the observer provided by
+ // the user must be ready to accept a bitrate update when it constructs the
// controller. We do this to avoid having to keep synchronized initial values
// in both the controller and the allocator.
MaybeTriggerOnNetworkChanged();
@@ -122,6 +126,18 @@ void BitrateControllerImpl::SetMinMaxBitrate(int min_bitrate_bps,
MaybeTriggerOnNetworkChanged();
}
+void BitrateControllerImpl::SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) {
+ {
+ rtc::CritScope cs(&critsect_);
+ bandwidth_estimation_.SetBitrates(start_bitrate_bps,
+ min_bitrate_bps,
+ max_bitrate_bps);
+ }
+ MaybeTriggerOnNetworkChanged();
+}
+
void BitrateControllerImpl::SetReservedBitrate(uint32_t reserved_bitrate_bps) {
{
rtc::CritScope cs(&critsect_);
@@ -187,11 +203,15 @@ void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
}
void BitrateControllerImpl::MaybeTriggerOnNetworkChanged() {
- uint32_t bitrate;
+ if (!observer_)
+ return;
+
+ uint32_t bitrate_bps;
uint8_t fraction_loss;
int64_t rtt;
- if (GetNetworkParameters(&bitrate, &fraction_loss, &rtt))
- observer_->OnNetworkChanged(bitrate, fraction_loss, rtt);
+
+ if (GetNetworkParameters(&bitrate_bps, &fraction_loss, &rtt))
+ observer_->OnNetworkChanged(bitrate_bps, fraction_loss, rtt);
}
bool BitrateControllerImpl::GetNetworkParameters(uint32_t* bitrate,
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index 6f776d758b4..5a61379ce01 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -20,6 +20,7 @@
#include <list>
#include <utility>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h"
@@ -27,6 +28,8 @@ namespace webrtc {
class BitrateControllerImpl : public BitrateController {
public:
+ // TODO(perkj): BitrateObserver has been deprecated and is not used in WebRTC.
+ // |observer| is left for project that is not yet updated.
BitrateControllerImpl(Clock* clock, BitrateObserver* observer);
virtual ~BitrateControllerImpl() {}
@@ -34,15 +37,26 @@ class BitrateControllerImpl : public BitrateController {
RtcpBandwidthObserver* CreateRtcpBandwidthObserver() override;
+ // Deprecated
void SetStartBitrate(int start_bitrate_bps) override;
+ // Deprecated
void SetMinMaxBitrate(int min_bitrate_bps, int max_bitrate_bps) override;
+ void SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) override;
+
void UpdateDelayBasedEstimate(uint32_t bitrate_bps) override;
void SetReservedBitrate(uint32_t reserved_bitrate_bps) override;
void SetEventLog(RtcEventLog* event_log) override;
+ // Returns true if the parameters have changed since the last call.
+ bool GetNetworkParameters(uint32_t* bitrate,
+ uint8_t* fraction_loss,
+ int64_t* rtt) override;
+
int64_t TimeUntilNextProcess() override;
void Process() override;
@@ -57,20 +71,16 @@ class BitrateControllerImpl : public BitrateController {
int number_of_packets,
int64_t now_ms);
+ // Deprecated
void MaybeTriggerOnNetworkChanged();
- // Returns true if the parameters have changed since the last call.
- bool GetNetworkParameters(uint32_t* bitrate,
- uint8_t* fraction_loss,
- int64_t* rtt);
-
void OnNetworkChanged(uint32_t bitrate,
uint8_t fraction_loss, // 0 - 255.
int64_t rtt) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
// Used by process thread.
- Clock* clock_;
- BitrateObserver* observer_;
+ Clock* const clock_;
+ BitrateObserver* const observer_;
int64_t last_bitrate_update_ms_;
rtc::CriticalSection critsect_;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 3f467ef8a46..4f92a3884b5 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -14,11 +14,16 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/mock/mock_paced_sender.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-using webrtc::RtcpBandwidthObserver;
-using webrtc::BitrateObserver;
+using ::testing::Exactly;
+using ::testing::Return;
+
using webrtc::BitrateController;
+using webrtc::BitrateObserver;
+using webrtc::PacedSender;
+using webrtc::RtcpBandwidthObserver;
uint8_t WeightedLoss(int num_packets1, uint8_t fraction_loss1,
int num_packets2, uint8_t fraction_loss2) {
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index a9c247acf17..a61cf6a7a74 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -18,6 +18,7 @@
#include <map>
#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
@@ -26,6 +27,8 @@ class CriticalSectionWrapper;
class RtcEventLog;
struct PacketInfo;
+// Deprecated
+// TODO(perkj): Remove BitrateObserver when no implementations use it.
class BitrateObserver {
// Observer class for bitrate changes announced due to change in bandwidth
// estimate or due to bitrate allocation changes. Fraction loss and rtt is
@@ -46,16 +49,26 @@ class BitrateController : public Module {
// estimation and divide the available bitrate between all its registered
// BitrateObservers.
public:
- static const int kDefaultStartBitrateKbps = 300;
+ static const int kDefaultStartBitratebps = 300000;
+ // Deprecated:
+ // TODO(perkj): BitrateObserver has been deprecated and is not used in WebRTC.
+ // Remove this method once other other projects does not use it.
static BitrateController* CreateBitrateController(Clock* clock,
BitrateObserver* observer);
+ static BitrateController* CreateBitrateController(Clock* clock);
+
virtual ~BitrateController() {}
virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
+ // Deprecated
virtual void SetStartBitrate(int start_bitrate_bps) = 0;
+ // Deprecated
virtual void SetMinMaxBitrate(int min_bitrate_bps, int max_bitrate_bps) = 0;
+ virtual void SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) = 0;
virtual void UpdateDelayBasedEstimate(uint32_t bitrate_bps) = 0;
@@ -66,6 +79,10 @@ class BitrateController : public Module {
virtual bool AvailableBandwidth(uint32_t* bandwidth) const = 0;
virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) = 0;
+
+ virtual bool GetNetworkParameters(uint32_t* bitrate,
+ uint8_t* fraction_loss,
+ int64_t* rtt) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
index 45b596a8688..da6169e748e 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
@@ -31,10 +31,16 @@ class MockBitrateController : public BitrateController {
MOCK_METHOD1(SetStartBitrate, void(int start_bitrate_bps));
MOCK_METHOD2(SetMinMaxBitrate,
void(int min_bitrate_bps, int max_bitrate_bps));
+ MOCK_METHOD3(SetBitrates,
+ void(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps));
MOCK_METHOD1(UpdateDelayBasedEstimate, void(uint32_t bitrate_bps));
MOCK_METHOD1(SetEventLog, void(RtcEventLog* event_log));
MOCK_CONST_METHOD1(AvailableBandwidth, bool(uint32_t* bandwidth));
MOCK_METHOD1(SetReservedBitrate, void(uint32_t reserved_bitrate_bps));
+ MOCK_METHOD3(GetNetworkParameters,
+ bool(uint32_t* bitrate, uint8_t* fraction_loss, int64_t* rtt));
MOCK_METHOD0(Process, void());
MOCK_METHOD0(TimeUntilNextProcess, int64_t());
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
index 785267d8c97..a1b78a257c6 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -67,6 +67,14 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation()
SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {}
+void SendSideBandwidthEstimation::SetBitrates(int send_bitrate,
+ int min_bitrate,
+ int max_bitrate) {
+ if (send_bitrate > 0)
+ SetSendBitrate(send_bitrate);
+ SetMinMaxBitrate(min_bitrate, max_bitrate);
+}
+
void SendSideBandwidthEstimation::SetSendBitrate(int bitrate) {
RTC_DCHECK_GT(bitrate, 0);
bitrate_ = bitrate;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
index 15894f93954..402d22a6bf4 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -46,6 +46,9 @@ class SendSideBandwidthEstimation {
int number_of_packets,
int64_t now_ms);
+ void SetBitrates(int send_bitrate,
+ int min_bitrate,
+ int max_bitrate);
void SetSendBitrate(int bitrate);
void SetMinMaxBitrate(int min_bitrate, int max_bitrate);
int GetMinBitrate() const;
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
index 11ce46adf3a..9f95fc3a1f9 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
@@ -15,11 +15,11 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/socket.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/send_time_history.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
@@ -47,11 +47,10 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override {
+ const RTPHeader& header) override {
CriticalSectionScoped cs(crit_sect_.get());
PickEstimatorFromHeader(header);
- rbe_->IncomingPacket(arrival_time_ms, payload_size, header, was_paced);
+ rbe_->IncomingPacket(arrival_time_ms, payload_size, header);
}
void Process() override {
@@ -80,7 +79,7 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
return rbe_->LatestEstimate(ssrcs, bitrate_bps);
}
- void SetMinBitrate(int min_bitrate_bps) {
+ void SetMinBitrate(int min_bitrate_bps) override {
CriticalSectionScoped cs(crit_sect_.get());
rbe_->SetMinBitrate(min_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
@@ -115,7 +114,7 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
// Instantiate RBE for Time Offset or Absolute Send Time extensions.
void PickEstimator() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_.get()) {
if (using_absolute_send_time_) {
- rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_, clock_));
+ rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_));
} else {
rbe_.reset(new RemoteBitrateEstimatorSingleStream(observer_, clock_));
}
@@ -140,31 +139,78 @@ CongestionController::CongestionController(
BitrateObserver* bitrate_observer,
RemoteBitrateObserver* remote_bitrate_observer)
: clock_(clock),
- pacer_(new PacedSender(clock_,
- &packet_router_,
- BitrateController::kDefaultStartBitrateKbps,
- PacedSender::kDefaultPaceMultiplier *
- BitrateController::kDefaultStartBitrateKbps,
- 0)),
+ observer_(nullptr),
+ packet_router_(new PacketRouter()),
+ pacer_(new PacedSender(clock_, packet_router_.get())),
remote_bitrate_estimator_(
new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
- // Constructed last as this object calls the provided callback on
- // construction.
bitrate_controller_(
BitrateController::CreateBitrateController(clock_, bitrate_observer)),
- remote_estimator_proxy_(clock_, &packet_router_),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
+ transport_feedback_adapter_(bitrate_controller_.get(), clock_),
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::CongestionController(
+ Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer)
+ : clock_(clock),
+ observer_(observer),
+ packet_router_(new PacketRouter()),
+ pacer_(new PacedSender(clock_, packet_router_.get())),
+ remote_bitrate_estimator_(
+ new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
+ bitrate_controller_(BitrateController::CreateBitrateController(clock_)),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
+ transport_feedback_adapter_(bitrate_controller_.get(), clock_),
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::CongestionController(
+ Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer,
+ std::unique_ptr<PacketRouter> packet_router,
+ std::unique_ptr<PacedSender> pacer)
+ : clock_(clock),
+ observer_(observer),
+ packet_router_(std::move(packet_router)),
+ pacer_(std::move(pacer)),
+ remote_bitrate_estimator_(
+ new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
+ // Constructed last as this object calls the provided callback on
+ // construction.
+ bitrate_controller_(BitrateController::CreateBitrateController(clock_)),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
transport_feedback_adapter_(bitrate_controller_.get(), clock_),
- min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps) {
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::~CongestionController() {}
+
+void CongestionController::Init() {
transport_feedback_adapter_.SetBitrateEstimator(
- new RemoteBitrateEstimatorAbsSendTime(&transport_feedback_adapter_,
- clock_));
+ new RemoteBitrateEstimatorAbsSendTime(&transport_feedback_adapter_));
transport_feedback_adapter_.GetBitrateEstimator()->SetMinBitrate(
min_bitrate_bps_);
}
-CongestionController::~CongestionController() {
-}
-
void CongestionController::SetBweBitrates(int min_bitrate_bps,
int start_bitrate_bps,
@@ -177,16 +223,19 @@ void CongestionController::SetBweBitrates(int min_bitrate_bps,
min_bitrate_bps = kMinBitrateBps;
if (max_bitrate_bps > 0)
max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps);
- if (start_bitrate_bps > 0) {
+ if (start_bitrate_bps > 0)
start_bitrate_bps = std::max(min_bitrate_bps, start_bitrate_bps);
- bitrate_controller_->SetStartBitrate(start_bitrate_bps);
- }
- bitrate_controller_->SetMinMaxBitrate(min_bitrate_bps, max_bitrate_bps);
+
+ bitrate_controller_->SetBitrates(start_bitrate_bps,
+ min_bitrate_bps,
+ max_bitrate_bps);
+
if (remote_bitrate_estimator_)
remote_bitrate_estimator_->SetMinBitrate(min_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
transport_feedback_adapter_.GetBitrateEstimator()->SetMinBitrate(
min_bitrate_bps_);
+ MaybeTriggerOnNetworkChanged();
}
BitrateController* CongestionController::GetBitrateController() const {
@@ -207,10 +256,9 @@ CongestionController::GetTransportFeedbackObserver() {
return &transport_feedback_adapter_;
}
-void CongestionController::UpdatePacerBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps) {
- pacer_->UpdateBitrate(bitrate_kbps, max_bitrate_kbps, min_bitrate_kbps);
+void CongestionController::SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps) {
+ pacer_->SetAllocatedSendBitrate(allocated_bitrate_bps, padding_bitrate_bps);
}
int64_t CongestionController::GetPacerQueuingDelayMs() const {
@@ -223,6 +271,11 @@ void CongestionController::SignalNetworkState(NetworkState state) {
} else {
pacer_->Pause();
}
+ {
+ rtc::CritScope cs(&critsect_);
+ network_state_ = state;
+ }
+ MaybeTriggerOnNetworkChanged();
}
void CongestionController::OnSentPacket(const rtc::SentPacket& sent_packet) {
@@ -243,6 +296,52 @@ int64_t CongestionController::TimeUntilNextProcess() {
void CongestionController::Process() {
bitrate_controller_->Process();
remote_bitrate_estimator_->Process();
+ MaybeTriggerOnNetworkChanged();
+}
+
+void CongestionController::MaybeTriggerOnNetworkChanged() {
+ // TODO(perkj): |observer_| can be nullptr if the ctor that accepts a
+ // BitrateObserver is used. Remove this check once the ctor is removed.
+ if (!observer_)
+ return;
+
+ uint32_t bitrate_bps;
+ uint8_t fraction_loss;
+ int64_t rtt;
+ bool estimate_changed = bitrate_controller_->GetNetworkParameters(
+ &bitrate_bps, &fraction_loss, &rtt);
+ if (estimate_changed)
+ pacer_->SetEstimatedBitrate(bitrate_bps);
+
+ bitrate_bps = IsNetworkDown() || IsSendQueueFull() ? 0 : bitrate_bps;
+
+ if (HasNetworkParametersToReportChanged(bitrate_bps, fraction_loss, rtt)) {
+ observer_->OnNetworkChanged(bitrate_bps, fraction_loss, rtt);
+ }
+}
+
+bool CongestionController::HasNetworkParametersToReportChanged(
+ uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt) {
+ rtc::CritScope cs(&critsect_);
+ bool changed =
+ last_reported_bitrate_bps_ != bitrate_bps ||
+ (bitrate_bps > 0 && (last_reported_fraction_loss_ != fraction_loss ||
+ last_reported_rtt_ != rtt));
+ last_reported_bitrate_bps_ = bitrate_bps;
+ last_reported_fraction_loss_ = fraction_loss;
+ last_reported_rtt_ = rtt;
+ return changed;
+}
+
+bool CongestionController::IsSendQueueFull() const {
+ return pacer_->ExpectedQueueTimeMs() > PacedSender::kMaxQueueLengthMs;
+}
+
+bool CongestionController::IsNetworkDown() const {
+ rtc::CritScope cs(&critsect_);
+ return network_state_ == kNetworkDown;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc
new file mode 100644
index 00000000000..c82c75daf3c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/pacing/mock/mock_paced_sender.h"
+#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
+#include "webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+using testing::_;
+using testing::NiceMock;
+using testing::Return;
+using testing::SaveArg;
+using testing::StrictMock;
+
+namespace webrtc {
+namespace test {
+
+class CongestionControllerTest : public ::testing::Test {
+ protected:
+ CongestionControllerTest() : clock_(123456) {}
+ ~CongestionControllerTest() override {}
+
+ void SetUp() override {
+ pacer_ = new NiceMock<MockPacedSender>();
+ std::unique_ptr<PacedSender> pacer(pacer_); // Passes ownership.
+ std::unique_ptr<PacketRouter> packet_router(new PacketRouter());
+ controller_.reset(
+ new CongestionController(&clock_, &observer_, &remote_bitrate_observer_,
+ std::move(packet_router), std::move(pacer)));
+ bandwidth_observer_.reset(
+ controller_->GetBitrateController()->CreateRtcpBandwidthObserver());
+
+ // Set the initial bitrate estimate and expect the |observer| and |pacer_|
+ // to be updated.
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps));
+ controller_->SetBweBitrates(0, kInitialBitrateBps, 5 * kInitialBitrateBps);
+ }
+
+ SimulatedClock clock_;
+ StrictMock<MockCongestionObserver> observer_;
+ NiceMock<MockPacedSender>* pacer_;
+ NiceMock<MockRemoteBitrateObserver> remote_bitrate_observer_;
+ std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+ std::unique_ptr<CongestionController> controller_;
+ const uint32_t kInitialBitrateBps = 60000;
+};
+
+TEST_F(CongestionControllerTest, OnNetworkChanged) {
+ // Test no change.
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, OnSendQueueFull) {
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, OnSendQueueFullAndEstimateChange) {
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Receive new estimate but let the queue still be full.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+ // The send pacer should get the new estimate though.
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ // |OnNetworkChanged| should be called with the new estimate.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, SignalNetworkState) {
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->SignalNetworkState(kNetworkDown);
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ controller_->SignalNetworkState(kNetworkUp);
+
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->SignalNetworkState(kNetworkDown);
+}
+
+TEST_F(CongestionControllerTest,
+ SignalNetworkStateAndQueueIsFullAndEstimateChange) {
+ // Send queue is full
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillRepeatedly(Return(PacedSender::kMaxQueueLengthMs + 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Queue is full and network is down. Expect no bitrate change.
+ controller_->SignalNetworkState(kNetworkDown);
+ controller_->Process();
+
+ // Queue is full but network is up. Expect no bitrate change.
+ controller_->SignalNetworkState(kNetworkUp);
+ controller_->Process();
+
+ // Receive new estimate but let the queue still be full.
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ controller_->Process();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h b/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
index 65bf5be3ec1..da8719d33a7 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
+++ b/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
@@ -11,13 +11,16 @@
#ifndef WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_CONGESTION_CONTROLLER_H_
#define WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_CONGESTION_CONTROLLER_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
-#include "webrtc/stream.h"
namespace rtc {
struct SentPacket;
@@ -28,7 +31,6 @@ namespace webrtc {
class BitrateController;
class BitrateObserver;
class Clock;
-class PacedSender;
class ProcessThread;
class RemoteBitrateEstimator;
class RemoteBitrateObserver;
@@ -36,9 +38,33 @@ class TransportFeedbackObserver;
class CongestionController : public CallStatsObserver, public Module {
public:
+ // Observer class for bitrate changes announced due to change in bandwidth
+ // estimate or due to that the send pacer is full. Fraction loss and rtt is
+ // also part of this callback to allow the observer to optimize its settings
+ // for different types of network environments. The bitrate does not include
+ // packet headers and is measured in bits per second.
+ class Observer {
+ public:
+ virtual void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_loss, // 0 - 255.
+ int64_t rtt_ms) = 0;
+
+ protected:
+ virtual ~Observer() {}
+ };
+ // Deprecated
+ // TODO(perkj): Remove once no other clients use this ctor.
CongestionController(Clock* clock,
BitrateObserver* bitrate_observer,
RemoteBitrateObserver* remote_bitrate_observer);
+ CongestionController(Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer);
+ CongestionController(Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer,
+ std::unique_ptr<PacketRouter> packet_router,
+ std::unique_ptr<PacedSender> pacer);
virtual ~CongestionController();
virtual void SetBweBitrates(int min_bitrate_bps,
@@ -50,12 +76,11 @@ class CongestionController : public CallStatsObserver, public Module {
bool send_side_bwe);
virtual int64_t GetPacerQueuingDelayMs() const;
virtual PacedSender* pacer() { return pacer_.get(); }
- virtual PacketRouter* packet_router() { return &packet_router_; }
+ virtual PacketRouter* packet_router() { return packet_router_.get(); }
virtual TransportFeedbackObserver* GetTransportFeedbackObserver();
- virtual void UpdatePacerBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ void SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps);
virtual void OnSentPacket(const rtc::SentPacket& sent_packet);
@@ -67,14 +92,28 @@ class CongestionController : public CallStatsObserver, public Module {
void Process() override;
private:
+ void Init();
+ void MaybeTriggerOnNetworkChanged();
+
+ bool IsSendQueueFull() const;
+ bool IsNetworkDown() const;
+ bool HasNetworkParametersToReportChanged(uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt);
Clock* const clock_;
- const rtc::scoped_ptr<PacedSender> pacer_;
- const rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
- const rtc::scoped_ptr<BitrateController> bitrate_controller_;
- PacketRouter packet_router_;
+ Observer* const observer_;
+ const std::unique_ptr<PacketRouter> packet_router_;
+ const std::unique_ptr<PacedSender> pacer_;
+ const std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ const std::unique_ptr<BitrateController> bitrate_controller_;
RemoteEstimatorProxy remote_estimator_proxy_;
TransportFeedbackAdapter transport_feedback_adapter_;
int min_bitrate_bps_;
+ rtc::CriticalSection critsect_;
+ uint32_t last_reported_bitrate_bps_ GUARDED_BY(critsect_);
+ uint8_t last_reported_fraction_loss_ GUARDED_BY(critsect_);
+ int64_t last_reported_rtt_ GUARDED_BY(critsect_);
+ NetworkState network_state_ GUARDED_BY(critsect_);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(CongestionController);
};
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h b/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
index 0813c3d42cd..20955ea81a8 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
+++ b/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
@@ -12,20 +12,27 @@
#define WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_MOCK_MOCK_CONGESTION_CONTROLLER_H_
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socket.h"
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
namespace webrtc {
namespace test {
+class MockCongestionObserver : public CongestionController::Observer {
+ public:
+ MOCK_METHOD3(OnNetworkChanged,
+ void(uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt_ms));
+};
+
class MockCongestionController : public CongestionController {
public:
MockCongestionController(Clock* clock,
- BitrateObserver* bitrate_observer,
+ Observer* observer,
RemoteBitrateObserver* remote_bitrate_observer)
- : CongestionController(clock,
- bitrate_observer,
- remote_bitrate_observer) {}
+ : CongestionController(clock, observer, remote_bitrate_observer) {}
MOCK_METHOD3(SetBweBitrates,
void(int min_bitrate_bps,
int start_bitrate_bps,
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
index aa33993192a..894d9308e36 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
@@ -60,9 +60,7 @@ source_set("desktop_capture") {
"mouse_cursor_monitor.h",
"mouse_cursor_monitor_mac.mm",
"mouse_cursor_monitor_win.cc",
- "screen_capture_frame_queue.cc",
"screen_capture_frame_queue.h",
- "screen_capturer.cc",
"screen_capturer.h",
"screen_capturer_helper.cc",
"screen_capturer_helper.h",
@@ -87,7 +85,6 @@ source_set("desktop_capture") {
"win/screen_capturer_win_magnifier.h",
"win/window_capture_utils.cc",
"win/window_capture_utils.h",
- "window_capturer.cc",
"window_capturer.h",
"window_capturer_mac.mm",
"window_capturer_win.cc",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
index f57fc572b65..733fe9b2adf 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
@@ -12,6 +12,8 @@
#include "webrtc/modules/desktop_capture/cropped_desktop_frame.h"
+#include "webrtc/base/constructormagic.h"
+
namespace webrtc {
// A DesktopFrame that is a sub-rect of another DesktopFrame.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
index 0dd564f1705..cbe7d96e5d6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
@@ -32,7 +32,7 @@ void CroppingWindowCapturer::Start(DesktopCapturer::Callback* callback) {
}
void CroppingWindowCapturer::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
window_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
index 177b5443a31..dfeb447e449 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
@@ -13,7 +13,6 @@
#include <memory>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/modules/desktop_capture/window_capturer.h"
@@ -32,7 +31,7 @@ class CroppingWindowCapturer : public WindowCapturer,
// DesktopCapturer implementation.
void Start(DesktopCapturer::Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
void SetExcludedWindow(WindowId window) override;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
index 55afedeabf1..4c6e27e561e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
@@ -137,7 +138,7 @@ void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) {
}
void DesktopAndCursorComposer::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
desktop_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
index cd0b2cfbb65..dcbe6129e65 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
@@ -13,7 +13,7 @@
#include <memory>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
@@ -36,7 +36,7 @@ class DesktopAndCursorComposer : public DesktopCapturer,
// DesktopCapturer interface.
void Start(DesktopCapturer::Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
void SetExcludedWindow(WindowId window) override;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
index b92447c3497..c4fbabf6b24 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
@@ -56,9 +56,7 @@
"mouse_cursor_monitor_mac.mm",
"mouse_cursor_monitor_win.cc",
"mouse_cursor_monitor_x11.cc",
- "screen_capture_frame_queue.cc",
"screen_capture_frame_queue.h",
- "screen_capturer.cc",
"screen_capturer.h",
"screen_capturer_helper.cc",
"screen_capturer_helper.h",
@@ -84,7 +82,6 @@
"win/screen_capture_utils.h",
"win/window_capture_utils.cc",
"win/window_capture_utils.h",
- "window_capturer.cc",
"window_capturer.h",
"window_capturer_mac.mm",
"window_capturer_win.cc",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
index 47f78dc3ff4..ba70e015537 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
@@ -13,7 +13,8 @@
#include <stddef.h>
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/shared_memory.h"
@@ -28,11 +29,6 @@ class DesktopCapturer {
// Interface that must be implemented by the DesktopCapturer consumers.
class Callback {
public:
- // Deprecated.
- // TODO(sergeyu): Remove this method once all references to it are removed
- // from chromium.
- virtual SharedMemory* CreateSharedMemory(size_t size) { return nullptr; }
-
// Called after a frame has been captured. Handler must take ownership of
// |frame|. If capture has failed for any reason |frame| is set to NULL
// (e.g. the window has been closed).
@@ -53,7 +49,7 @@ class DesktopCapturer {
// where Capture() is called. It will be destroyed on the same thread. Shared
// memory is currently supported only by some DesktopCapturer implementations.
virtual void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {}
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {}
// Captures next frame. |region| specifies region of the capture target that
// should be fresh in the resulting frame. The frame may also include fresh
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
index 6bc7b2e38f6..3278ed46dcb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
@@ -84,8 +84,7 @@ std::unique_ptr<DesktopFrame> SharedMemoryDesktopFrame::Create(
size_t buffer_size =
size.width() * size.height() * DesktopFrame::kBytesPerPixel;
std::unique_ptr<SharedMemory> shared_memory;
- shared_memory = rtc::ScopedToUnique(
- shared_memory_factory->CreateSharedMemory(buffer_size));
+ shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
if (!shared_memory)
return nullptr;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
index 3cd839ca1b8..53091239e40 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/shared_memory.h"
@@ -55,11 +56,6 @@ class DesktopFrame {
int64_t capture_time_ms() const { return capture_time_ms_; }
void set_capture_time_ms(int64_t time_ms) { capture_time_ms_ = time_ms; }
- // Optional shape for the frame. Frames may be shaped e.g. if
- // capturing the contents of a shaped window.
- const DesktopRegion* shape() const { return shape_.get(); }
- void set_shape(DesktopRegion* shape) { shape_.reset(shape); }
-
// Copies pixels from a buffer or another frame. |dest_rect| rect must lay
// within bounds of this frame.
void CopyPixelsFrom(uint8_t* src_buffer, int src_stride,
@@ -89,7 +85,6 @@ class DesktopFrame {
DesktopRegion updated_region_;
DesktopVector dpi_;
int64_t capture_time_ms_;
- std::unique_ptr<DesktopRegion> shape_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrame);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
index f139fb5cddf..624b729203b 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
@@ -49,10 +49,8 @@ DesktopFrameWin* DesktopFrameWin::Create(
std::unique_ptr<SharedMemory> shared_memory;
HANDLE section_handle = nullptr;
if (shared_memory_factory) {
- shared_memory = rtc::ScopedToUnique(
- shared_memory_factory->CreateSharedMemory(buffer_size));
- if (shared_memory)
- section_handle = shared_memory->handle();
+ shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
+ section_handle = shared_memory->handle();
}
void* data = nullptr;
HBITMAP bitmap = CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &data,
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
index 929d23c0e56..3513e14ffb7 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
@@ -15,6 +15,7 @@
#include <windows.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
index bc9972660ad..e130c103811 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
@@ -20,6 +20,9 @@ DesktopRegion::RowSpan::RowSpan(int32_t left, int32_t right)
: left(left), right(right) {
}
+DesktopRegion::Row::Row(const Row&) = default;
+DesktopRegion::Row::Row(Row&&) = default;
+
DesktopRegion::Row::Row(int32_t top, int32_t bottom)
: top(top), bottom(bottom) {
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
index c86da56e173..5278159412a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
@@ -47,6 +47,8 @@ class DesktopRegion {
// Row represents a single row of a region. A row is set of rectangles that
// have the same vertical position.
struct Row {
+ Row(const Row&);
+ Row(Row&&);
Row(int32_t top, int32_t bottom);
~Row();
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ.h b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
index c3dcd4b0e1d..9ab059bcaa2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
@@ -14,6 +14,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
index df4e6a45c0a..543910de289 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
@@ -11,6 +11,7 @@
#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/differ.h"
#include "webrtc/modules/desktop_capture/differ_block.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
index 2f2dd72a6e5..329beef23d3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -16,6 +16,7 @@
#include <memory>
#include <set>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
#include "webrtc/system_wrappers/include/atomic32.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
index 2d3c2d90479..451acb3db5e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
@@ -15,6 +15,7 @@
#include <string>
#include "webrtc/base/macutils.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
@@ -141,7 +142,7 @@ bool IsChromeWindow(CGWindowID id) {
} // namespace
FullScreenChromeWindowDetector::FullScreenChromeWindowDetector()
- : ref_count_(0) {}
+ : ref_count_(0), last_update_time_ns_(0) {}
FullScreenChromeWindowDetector::~FullScreenChromeWindowDetector() {}
@@ -161,10 +162,7 @@ CGWindowID FullScreenChromeWindowDetector::FindFullScreenWindow(
if (static_cast<CGWindowID>(it->id) != full_screen_window_id)
continue;
- int64_t time_interval =
- (TickTime::Now() - last_udpate_time_).Milliseconds();
- LOG(LS_WARNING) << "The full-screen window exists in the list, "
- << "which was updated " << time_interval << "ms ago.";
+ LOG(LS_WARNING) << "The full-screen window exists in the list.";
return kCGNullWindowID;
}
@@ -174,7 +172,7 @@ CGWindowID FullScreenChromeWindowDetector::FindFullScreenWindow(
void FullScreenChromeWindowDetector::UpdateWindowListIfNeeded(
CGWindowID original_window) {
if (IsChromeWindow(original_window) &&
- (TickTime::Now() - last_udpate_time_).Milliseconds()
+ (rtc::TimeNanos() - last_update_time_ns_) / rtc::kNumNanosecsPerMillisec
> kUpdateIntervalMs) {
previous_window_list_.clear();
previous_window_list_.swap(current_window_list_);
@@ -186,7 +184,7 @@ void FullScreenChromeWindowDetector::UpdateWindowListIfNeeded(
}
GetWindowList(&current_window_list_);
- last_udpate_time_ = TickTime::Now();
+ last_update_time_ns_ = rtc::TimeNanos();
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
index 4e6008966e6..838966d46eb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
@@ -13,9 +13,9 @@
#include <ApplicationServices/ApplicationServices.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/window_capturer.h"
#include "webrtc/system_wrappers/include/atomic32.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -59,7 +59,7 @@ class FullScreenChromeWindowDetector {
// full-screen window exists in the list) if Capture() is called too soon.
WindowCapturer::WindowList current_window_list_;
WindowCapturer::WindowList previous_window_list_;
- TickTime last_udpate_time_;
+ int64_t last_update_time_ns_;
RTC_DISALLOW_COPY_AND_ASSIGN(FullScreenChromeWindowDetector);
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
index 204bb00b160..479a39a0217 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
@@ -11,10 +11,12 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
#include <assert.h>
+#include <string.h>
#include <memory>
#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor.h"
#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
@@ -22,6 +24,17 @@
namespace webrtc {
+namespace {
+
+bool IsSameCursorShape(const CURSORINFO& left, const CURSORINFO& right) {
+ // If the cursors are not showing, we do not care the hCursor handle.
+ return left.flags == right.flags &&
+ (left.flags != CURSOR_SHOWING ||
+ left.hCursor == right.hCursor);
+}
+
+} // namespace
+
class MouseCursorMonitorWin : public MouseCursorMonitor {
public:
explicit MouseCursorMonitorWin(HWND window);
@@ -45,7 +58,8 @@ class MouseCursorMonitorWin : public MouseCursorMonitor {
HDC desktop_dc_;
- HCURSOR last_cursor_;
+ // The last CURSORINFO (converted to MouseCursor) we have sent to the client.
+ CURSORINFO last_cursor_;
};
MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
@@ -53,8 +67,8 @@ MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
screen_(kInvalidScreenId),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
- desktop_dc_(NULL),
- last_cursor_(NULL) {
+ desktop_dc_(NULL) {
+ memset(&last_cursor_, 0, sizeof(CURSORINFO));
}
MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
@@ -62,9 +76,9 @@ MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
screen_(screen),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
- desktop_dc_(NULL),
- last_cursor_(NULL) {
+ desktop_dc_(NULL) {
assert(screen >= kFullDesktopScreenId);
+ memset(&last_cursor_, 0, sizeof(CURSORINFO));
}
MouseCursorMonitorWin::~MouseCursorMonitorWin() {
@@ -92,13 +106,31 @@ void MouseCursorMonitorWin::Capture() {
return;
}
- if (last_cursor_ != cursor_info.hCursor) {
- last_cursor_ = cursor_info.hCursor;
- // Note that |cursor_info.hCursor| does not need to be freed.
- std::unique_ptr<MouseCursor> cursor(
- CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
- if (cursor.get())
- callback_->OnMouseCursor(cursor.release());
+ if (!IsSameCursorShape(cursor_info, last_cursor_)) {
+ if (cursor_info.flags == CURSOR_SUPPRESSED) {
+ // The cursor is intentionally hidden now, send an empty bitmap.
+ last_cursor_ = cursor_info;
+ callback_->OnMouseCursor(new MouseCursor(
+ new BasicDesktopFrame(DesktopSize()), DesktopVector()));
+ } else {
+ // According to MSDN https://goo.gl/u6gyuC, HCURSOR instances returned by
+ // functions other than CreateCursor do not need to be actively destroyed.
+ // And CloseHandle function (https://goo.gl/ja5ycW) does not close a
+ // cursor, so assume a HCURSOR does not need to be closed.
+ if (cursor_info.flags == 0) {
+ // Host machine does not have a hardware mouse attached, we will send a
+ // default one instead.
+ // Note, Windows automatically caches cursor resource, so we do not need
+ // to cache the result of LoadCursor.
+ cursor_info.hCursor = LoadCursor(nullptr, IDC_ARROW);
+ }
+ std::unique_ptr<MouseCursor> cursor(
+ CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
+ if (cursor) {
+ last_cursor_ = cursor_info;
+ callback_->OnMouseCursor(cursor.release());
+ }
+ }
}
if (mode_ != SHAPE_AND_POSITION)
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h
deleted file mode 100644
index 57120a0b3fa..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
-#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
-
-// This file is no longer needed, but some code in chromium still includes it.
-// TODO(sergeyu): Cleanup dependencies in chromium and remove this file.
-
-#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
deleted file mode 100644
index 94d8a27b137..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
-
-#include <assert.h>
-#include <algorithm>
-
-#include "webrtc/modules/desktop_capture/desktop_frame.h"
-#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-ScreenCaptureFrameQueue::ScreenCaptureFrameQueue() : current_(0) {}
-
-ScreenCaptureFrameQueue::~ScreenCaptureFrameQueue() {}
-
-void ScreenCaptureFrameQueue::MoveToNextFrame() {
- current_ = (current_ + 1) % kQueueLength;
-
- // Verify that the frame is not shared, i.e. that consumer has released it
- // before attempting to capture again.
- assert(!frames_[current_].get() || !frames_[current_]->IsShared());
-}
-
-void ScreenCaptureFrameQueue::ReplaceCurrentFrame(DesktopFrame* frame) {
- frames_[current_].reset(SharedDesktopFrame::Wrap(frame));
-}
-
-void ScreenCaptureFrameQueue::Reset() {
- for (int i = 0; i < kQueueLength; ++i)
- frames_[i].reset();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
index 21af0f320fc..97f3b810e9d 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
@@ -13,12 +13,12 @@
#include <memory>
-#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
-#include "webrtc/typedefs.h"
+#include "webrtc/base/constructormagic.h"
+// TODO(zijiehe): These headers are not used in this file, but to avoid build
+// break in remoting/host. We should add headers in each individual files.
+#include "webrtc/modules/desktop_capture/desktop_frame.h" // Remove
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h" // Remove
-namespace webrtc {
-class DesktopFrame;
-} // namespace webrtc
namespace webrtc {
@@ -35,28 +35,38 @@ namespace webrtc {
// Frame consumer is expected to never hold more than kQueueLength frames
// created by this function and it should release the earliest one before trying
// to capture a new frame (i.e. before MoveToNextFrame() is called).
+template <typename FrameType>
class ScreenCaptureFrameQueue {
public:
- ScreenCaptureFrameQueue();
- ~ScreenCaptureFrameQueue();
+ ScreenCaptureFrameQueue() : current_(0) {}
+ ~ScreenCaptureFrameQueue() = default;
// Moves to the next frame in the queue, moving the 'current' frame to become
// the 'previous' one.
- void MoveToNextFrame();
+ void MoveToNextFrame() {
+ current_ = (current_ + 1) % kQueueLength;
+ }
// Replaces the current frame with a new one allocated by the caller. The
// existing frame (if any) is destroyed. Takes ownership of |frame|.
- void ReplaceCurrentFrame(DesktopFrame* frame);
+ void ReplaceCurrentFrame(FrameType* frame) {
+ frames_[current_].reset(frame);
+ }
// Marks all frames obsolete and resets the previous frame pointer. No
// frames are freed though as the caller can still access them.
- void Reset();
+ void Reset() {
+ for (int i = 0; i < kQueueLength; i++) {
+ frames_[i].reset();
+ }
+ current_ = 0;
+ }
- SharedDesktopFrame* current_frame() const {
+ FrameType* current_frame() const {
return frames_[current_].get();
}
- SharedDesktopFrame* previous_frame() const {
+ FrameType* previous_frame() const {
return frames_[(current_ + kQueueLength - 1) % kQueueLength].get();
}
@@ -65,7 +75,7 @@ class ScreenCaptureFrameQueue {
int current_;
static const int kQueueLength = 2;
- std::unique_ptr<SharedDesktopFrame> frames_[kQueueLength];
+ std::unique_ptr<FrameType> frames_[kQueueLength];
RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCaptureFrameQueue);
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc
deleted file mode 100644
index 97f69d3baff..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/screen_capturer.h"
-
-#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-
-namespace webrtc {
-
-ScreenCapturer* ScreenCapturer::Create() {
- return Create(DesktopCaptureOptions::CreateDefault());
-}
-
-#if defined(WEBRTC_LINUX)
-ScreenCapturer* ScreenCapturer::CreateWithXDamage(
- bool use_update_notifications) {
- DesktopCaptureOptions options;
- options.set_use_update_notifications(use_update_notifications);
- return Create(options);
-}
-#elif defined(WEBRTC_WIN)
-ScreenCapturer* ScreenCapturer::CreateWithDisableAero(bool disable_effects) {
- DesktopCaptureOptions options;
- options.set_disable_effects(disable_effects);
- return Create(options);
-}
-#endif
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
index b4e34887662..48ecc31fbd8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
@@ -48,34 +48,10 @@ class ScreenCapturer : public DesktopCapturer {
};
typedef std::vector<Screen> ScreenList;
- // TODO(sergeyu): Remove this class once all dependencies are removed from
- // chromium.
- class MouseShapeObserver {
- };
-
virtual ~ScreenCapturer() {}
- // Creates platform-specific capturer.
- //
- // TODO(sergeyu): Remove all Create() methods except the first one.
- // crbug.com/172183
+ // Creates a platform-specific capturer.
static ScreenCapturer* Create(const DesktopCaptureOptions& options);
- static ScreenCapturer* Create();
-
-#if defined(WEBRTC_LINUX)
- // Creates platform-specific capturer and instructs it whether it should use
- // X DAMAGE support.
- static ScreenCapturer* CreateWithXDamage(bool use_x_damage);
-#elif defined(WEBRTC_WIN)
- // Creates Windows-specific capturer and instructs it whether or not to
- // disable desktop compositing.
- static ScreenCapturer* CreateWithDisableAero(bool disable_aero);
-#endif // defined(WEBRTC_WIN)
-
- // TODO(sergeyu): Remove this method once all dependencies are removed from
- // chromium.
- virtual void SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) {};
// Get the list of screens (not containing kFullDesktopScreenId). Returns
// false in case of a failure.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
index f912378333c..458bccc1ed3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
index c41dc4d7a3b..bf6c72950c2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
@@ -22,7 +22,10 @@
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
+#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/macutils.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
@@ -32,8 +35,8 @@
#include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -234,7 +237,7 @@ class ScreenCapturerMac : public ScreenCapturer {
ScopedPixelBufferObject pixel_buffer_object_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Current display configuration.
MacDesktopConfiguration desktop_config_;
@@ -381,9 +384,10 @@ void ScreenCapturerMac::Start(Callback* callback) {
}
void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
desktop_config_monitor_->Lock();
MacDesktopConfiguration new_config =
@@ -405,7 +409,7 @@ void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
if (!queue_.current_frame())
- queue_.ReplaceCurrentFrame(CreateFrame());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(CreateFrame()));
DesktopFrame* current_frame = queue_.current_frame();
@@ -444,7 +448,8 @@ void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
desktop_config_monitor_->Unlock();
new_frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(new_frame);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
index 64d649cd9d1..815c7f53afe 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
@@ -16,6 +16,7 @@
#include <ostream>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
@@ -38,7 +39,10 @@ class ScreenCapturerMacTest : public testing::Test {
void CaptureDoneCallback2(DesktopFrame* frame);
protected:
- void SetUp() override { capturer_.reset(ScreenCapturer::Create()); }
+ void SetUp() override {
+ capturer_.reset(
+ ScreenCapturer::Create(DesktopCaptureOptions::CreateDefault()));
+ }
std::unique_ptr<ScreenCapturer> capturer_;
MockScreenCapturerCallback callback_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
index b1f64e41c97..7264249e0fb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_MOCK_OBJECTS_H_
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
index 72105acca47..bc87ed3eba8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
@@ -14,6 +14,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
@@ -59,8 +60,8 @@ class FakeSharedMemoryFactory : public SharedMemoryFactory {
FakeSharedMemoryFactory() {}
~FakeSharedMemoryFactory() override {}
- rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
- return rtc::scoped_ptr<SharedMemory>(
+ std::unique_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
+ return std::unique_ptr<SharedMemory>(
new FakeSharedMemory(new char[size], size));
}
@@ -117,7 +118,7 @@ TEST_F(ScreenCapturerTest, UseSharedBuffers) {
capturer_->Start(&callback_);
capturer_->SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory>(new FakeSharedMemoryFactory()));
+ std::unique_ptr<SharedMemoryFactory>(new FakeSharedMemoryFactory()));
capturer_->Capture(DesktopRegion());
ASSERT_TRUE(frame);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
index 65e682b6f8b..5540e6820fa 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
@@ -21,14 +21,16 @@
#include <X11/Xutil.h>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/differ.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -106,7 +108,7 @@ class ScreenCapturerLinux : public ScreenCapturer,
ScreenCapturerHelper helper_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Invalid region from the previous capture. This is used to synchronize the
// current with the last buffer used.
@@ -234,9 +236,10 @@ void ScreenCapturerLinux::Start(Callback* callback) {
}
void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
// Process XEvents for XDamage and cursor shape tracking.
options_.x_display()->ProcessPendingXEvents();
@@ -256,7 +259,7 @@ void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
if (!queue_.current_frame()) {
std::unique_ptr<DesktopFrame> frame(
new BasicDesktopFrame(x_server_pixel_buffer_.window_size()));
- queue_.ReplaceCurrentFrame(frame.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(frame.release()));
}
// Refresh the Differ helper used by CaptureFrame(), if needed.
@@ -274,7 +277,8 @@ void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
DesktopFrame* result = CaptureScreen();
last_invalid_region_ = result->updated_region();
result->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(result);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
index 309bac55add..8d10827e29c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
@@ -12,6 +12,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/include/atomic32.h"
namespace webrtc {
@@ -48,8 +49,7 @@ class SharedDesktopFrame::Core {
SharedDesktopFrame::~SharedDesktopFrame() {}
// static
-SharedDesktopFrame* SharedDesktopFrame::Wrap(
- DesktopFrame* desktop_frame) {
+SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) {
rtc::scoped_refptr<Core> core(new Core(desktop_frame));
return new SharedDesktopFrame(core);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
index 7d18db153cd..4f6a2bb7c69 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
index 45f531e0d53..6e15f23f6b8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
@@ -17,8 +17,9 @@
#include <windows.h>
#endif
+#include <memory>
+
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -69,7 +70,7 @@ class SharedMemoryFactory {
SharedMemoryFactory() {}
virtual ~SharedMemoryFactory() {}
- virtual rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) = 0;
+ virtual std::unique_ptr<SharedMemory> CreateSharedMemory(size_t size) = 0;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(SharedMemoryFactory);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
index d3035a15ca5..9df2e5fc9b2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
@@ -14,6 +14,8 @@
#include <utility>
+#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
@@ -24,7 +26,6 @@
#include "webrtc/modules/desktop_capture/win/desktop.h"
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -36,22 +37,6 @@ const UINT DWM_EC_ENABLECOMPOSITION = 1;
const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
-// SharedMemoryFactory that creates SharedMemory using the deprecated
-// DesktopCapturer::Callback::CreateSharedMemory().
-class CallbackSharedMemoryFactory : public SharedMemoryFactory {
- public:
- CallbackSharedMemoryFactory(DesktopCapturer::Callback* callback)
- : callback_(callback) {}
- ~CallbackSharedMemoryFactory() override {}
-
- rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
- return rtc::scoped_ptr<SharedMemory>(callback_->CreateSharedMemory(size));
- }
-
- private:
- DesktopCapturer::Callback* callback_;
-};
-
} // namespace
ScreenCapturerWinGdi::ScreenCapturerWinGdi(const DesktopCaptureOptions& options)
@@ -89,15 +74,15 @@ ScreenCapturerWinGdi::~ScreenCapturerWinGdi() {
}
void ScreenCapturerWinGdi::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
- shared_memory_factory_ =
- rtc::ScopedToUnique(std::move(shared_memory_factory));
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
+ shared_memory_factory_ = std::move(shared_memory_factory);
}
void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
// Request that the system not power-down the system, or the display hardware.
if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
@@ -152,7 +137,8 @@ void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
frame->mutable_updated_region()->Clear();
helper_.TakeInvalidRegion(frame->mutable_updated_region());
frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(frame);
}
@@ -172,8 +158,6 @@ void ScreenCapturerWinGdi::Start(Callback* callback) {
assert(callback);
callback_ = callback;
- if (!shared_memory_factory_)
- shared_memory_factory_.reset(new CallbackSharedMemoryFactory(callback));
// Vote to disable Aero composited desktop effects while capturing. Windows
// will restore Aero automatically if the process exits. This has no effect
@@ -265,9 +249,9 @@ bool ScreenCapturerWinGdi::CaptureImage() {
std::unique_ptr<DesktopFrame> buffer(DesktopFrameWin::Create(
size, shared_memory_factory_.get(), desktop_dc_));
- if (!buffer.get())
+ if (!buffer)
return false;
- queue_.ReplaceCurrentFrame(buffer.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(buffer.release()));
}
// Select the target bitmap into the memory dc and copy the rect from desktop
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
index 17cb0aa1940..5a50580e690 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
@@ -17,9 +17,10 @@
#include <windows.h>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
namespace webrtc {
@@ -37,7 +38,7 @@ class ScreenCapturerWinGdi : public ScreenCapturer {
// Overridden from ScreenCapturer:
void Start(Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
bool GetScreenList(ScreenList* screens) override;
bool SelectScreen(ScreenId id) override;
@@ -71,7 +72,7 @@ class ScreenCapturerWinGdi : public ScreenCapturer {
HDC memory_dc_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Rectangle describing the bounds of the desktop device context, relative to
// the primary display's top-left.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
index 8af9779ce80..6b4308bb7a3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
@@ -14,6 +14,7 @@
#include <utility>
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
@@ -24,7 +25,6 @@
#include "webrtc/modules/desktop_capture/win/desktop.h"
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -82,13 +82,12 @@ void ScreenCapturerWinMagnifier::Start(Callback* callback) {
}
void ScreenCapturerWinMagnifier::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
- shared_memory_factory_ =
- rtc::ScopedToUnique(std::move(shared_memory_factory));
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
+ shared_memory_factory_ = std::move(shared_memory_factory);
}
void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
@@ -169,7 +168,8 @@ void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
frame->mutable_updated_region()->Clear();
helper_.TakeInvalidRegion(frame->mutable_updated_region());
frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(frame);
}
@@ -433,7 +433,7 @@ void ScreenCapturerWinMagnifier::CreateCurrentFrameIfNecessary(
? SharedMemoryDesktopFrame::Create(size,
shared_memory_factory_.get())
: std::unique_ptr<DesktopFrame>(new BasicDesktopFrame(size));
- queue_.ReplaceCurrentFrame(frame.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(frame.release()));
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
index d5e3946d627..623c8a30034 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
@@ -18,10 +18,10 @@
#include <wincodec.h>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
#include "webrtc/system_wrappers/include/atomic32.h"
@@ -47,7 +47,7 @@ class ScreenCapturerWinMagnifier : public ScreenCapturer {
// Overridden from ScreenCapturer:
void Start(Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
bool GetScreenList(ScreenList* screens) override;
bool SelectScreen(ScreenId id) override;
@@ -118,7 +118,7 @@ class ScreenCapturerWinMagnifier : public ScreenCapturer {
ScreenCapturerHelper helper_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Class to calculate the difference between two screen bitmaps.
std::unique_ptr<Differ> differ_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
index 7c80490f609..85bca4c8246 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
@@ -10,6 +10,7 @@
#include <windows.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc
deleted file mode 100644
index c5176d5e605..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc
+++ /dev/null
@@ -1,22 +0,0 @@
- /*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/window_capturer.h"
-
-#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-
-namespace webrtc {
-
-// static
-WindowCapturer* WindowCapturer::Create() {
- return Create(DesktopCaptureOptions::CreateDefault());
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
index 9ba441a8ecb..eb9b9feaf04 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
@@ -38,9 +38,6 @@ class WindowCapturer : public DesktopCapturer {
static WindowCapturer* Create(const DesktopCaptureOptions& options);
- // TODO(sergeyu): Remove this method. crbug.com/172183
- static WindowCapturer* Create();
-
virtual ~WindowCapturer() {}
// Get list of windows. Returns false in case of a failure.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
index 22061edbe7a..ac5fdb6bc1a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
@@ -15,6 +15,7 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/macutils.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
@@ -23,7 +24,6 @@
#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
index b74f17e39bf..5f32c3d71cc 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
@@ -12,6 +12,7 @@
#include <assert.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
index 0d594a2a096..702324372bd 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
@@ -15,6 +15,7 @@
#include <memory>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/win32.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
index 68e1725db31..8ead98109a8 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
@@ -19,6 +19,7 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
index d905b9e51c1..6cade2decc0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
@@ -19,6 +19,7 @@
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/system_wrappers/include/atomic32.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
index d1e6632f082..b868b044a81 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
@@ -13,6 +13,7 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_X11_X_SERVER_PIXEL_BUFFER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_X11_X_SERVER_PIXEL_BUFFER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include <X11/Xutil.h>
diff --git a/chromium/third_party/webrtc/modules/include/module_common_types.h b/chromium/third_party/webrtc/modules/include/module_common_types.h
index 82d87d5c5c7..3572cd6fc5c 100644
--- a/chromium/third_party/webrtc/modules/include/module_common_types.h
+++ b/chromium/third_party/webrtc/modules/include/module_common_types.h
@@ -71,9 +71,10 @@ struct RTPVideoHeaderVP8 {
};
enum TemporalStructureMode {
- kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
- kTemporalStructureMode2, // 2 temporal layers 0-1-0-1...
- kTemporalStructureMode3 // 3 temporal layers 0-2-1-2-0-2-1-2...
+ kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
+ kTemporalStructureMode2, // 2 temporal layers 01...
+ kTemporalStructureMode3, // 3 temporal layers 0212...
+ kTemporalStructureMode4 // 3 temporal layers 02120212...
};
struct GofInfoVP9 {
@@ -121,6 +122,52 @@ struct GofInfoVP9 {
pid_diff[3][0] = 1;
pid_diff[3][1] = 2;
break;
+ case kTemporalStructureMode4:
+ num_frames_in_gof = 8;
+ temporal_idx[0] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[0] = 1;
+ pid_diff[0][0] = 4;
+
+ temporal_idx[1] = 2;
+ temporal_up_switch[1] = true;
+ num_ref_pics[1] = 1;
+ pid_diff[1][0] = 1;
+
+ temporal_idx[2] = 1;
+ temporal_up_switch[2] = true;
+ num_ref_pics[2] = 1;
+ pid_diff[2][0] = 2;
+
+ temporal_idx[3] = 2;
+ temporal_up_switch[3] = false;
+ num_ref_pics[3] = 2;
+ pid_diff[3][0] = 1;
+ pid_diff[3][1] = 2;
+
+ temporal_idx[4] = 0;
+      temporal_up_switch[4] = false;
+ num_ref_pics[4] = 1;
+ pid_diff[4][0] = 4;
+
+ temporal_idx[5] = 2;
+      temporal_up_switch[5] = false;
+ num_ref_pics[5] = 2;
+ pid_diff[5][0] = 1;
+ pid_diff[5][1] = 2;
+
+ temporal_idx[6] = 1;
+      temporal_up_switch[6] = false;
+ num_ref_pics[6] = 2;
+ pid_diff[6][0] = 2;
+ pid_diff[6][1] = 4;
+
+ temporal_idx[7] = 2;
+      temporal_up_switch[7] = false;
+ num_ref_pics[7] = 2;
+ pid_diff[7][0] = 1;
+ pid_diff[7][1] = 2;
+ break;
default:
assert(false);
}
@@ -143,6 +190,7 @@ struct GofInfoVP9 {
bool temporal_up_switch[kMaxVp9FramesInGof];
uint8_t num_ref_pics[kMaxVp9FramesInGof];
uint8_t pid_diff[kMaxVp9FramesInGof][kMaxVp9RefPics];
+ uint16_t pid_start;
};
struct RTPVideoHeaderVP9 {
@@ -432,7 +480,6 @@ enum FecMaskType {
// Struct containing forward error correction settings.
struct FecProtectionParams {
int fec_rate;
- bool use_uep_protection;
int max_fec_frames;
FecMaskType fec_mask_type;
};
@@ -447,25 +494,6 @@ class CallStatsObserver {
virtual ~CallStatsObserver() {}
};
-struct VideoContentMetrics {
- VideoContentMetrics()
- : motion_magnitude(0.0f),
- spatial_pred_err(0.0f),
- spatial_pred_err_h(0.0f),
- spatial_pred_err_v(0.0f) {}
-
- void Reset() {
- motion_magnitude = 0.0f;
- spatial_pred_err = 0.0f;
- spatial_pred_err_h = 0.0f;
- spatial_pred_err_v = 0.0f;
- }
- float motion_magnitude;
- float spatial_pred_err;
- float spatial_pred_err_h;
- float spatial_pred_err_v;
-};
-
/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
* allows for adding and subtracting frames while keeping track of the resulting
* states.
diff --git a/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc b/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
index 76bcca74d2c..27fe9613a3b 100644
--- a/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
+++ b/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
@@ -14,7 +14,6 @@
#include "webrtc/modules/media_file/media_file_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/modules.gyp b/chromium/third_party/webrtc/modules/modules.gyp
index d5df8539051..e11bbfe88b3 100644
--- a/chromium/third_party/webrtc/modules/modules.gyp
+++ b/chromium/third_party/webrtc/modules/modules.gyp
@@ -26,7 +26,6 @@
'video_coding/video_coding.gypi',
'video_capture/video_capture.gypi',
'video_processing/video_processing.gypi',
- 'video_render/video_render.gypi',
],
'conditions': [
['include_tests==1', {
@@ -64,7 +63,6 @@
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
- '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/test/test.gyp:test_support',
@@ -86,7 +84,6 @@
'audio_coding/test/PCMFile.cc',
'audio_coding/test/PacketLossTest.cc',
'audio_coding/test/RTPFile.cc',
- 'audio_coding/test/SpatialAudio.cc',
'audio_coding/test/TestAllCodecs.cc',
'audio_coding/test/TestRedFec.cc',
'audio_coding/test/TestStereo.cc',
@@ -133,6 +130,7 @@
'audio_processing',
'audioproc_test_utils',
'bitrate_controller',
+ 'builtin_audio_decoder_factory',
'bwe_simulator',
'cng',
'isac_fix',
@@ -167,7 +165,7 @@
'<(webrtc_root)/tools/tools.gyp:agc_test_utils',
],
'sources': [
- 'audio_coding/codecs/audio_encoder_unittest.cc',
+ 'audio_coding/codecs/audio_decoder_factory_unittest.cc',
'audio_coding/codecs/cng/audio_encoder_cng_unittest.cc',
'audio_coding/acm2/acm_receiver_unittest_oldapi.cc',
'audio_coding/acm2/audio_coding_module_unittest_oldapi.cc',
@@ -214,6 +212,7 @@
'audio_coding/neteq/post_decode_vad_unittest.cc',
'audio_coding/neteq/random_vector_unittest.cc',
'audio_coding/neteq/sync_buffer_unittest.cc',
+ 'audio_coding/neteq/tick_timer_unittest.cc',
'audio_coding/neteq/timestamp_scaler_unittest.cc',
'audio_coding/neteq/time_stretch_unittest.cc',
'audio_coding/neteq/mock/mock_audio_decoder.h',
@@ -270,6 +269,7 @@
'audio_processing/vad/voice_activity_detector_unittest.cc',
'bitrate_controller/bitrate_controller_unittest.cc',
'bitrate_controller/send_side_bandwidth_estimation_unittest.cc',
+ 'congestion_controller/congestion_controller_unittest.cc',
'media_file/media_file_unittest.cc',
'module_common_types_unittest.cc',
'pacing/bitrate_prober_unittest.cc',
@@ -338,6 +338,7 @@
'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
'rtp_rtcp/source/rtp_format_vp9_unittest.cc',
'rtp_rtcp/source/rtp_packet_history_unittest.cc',
+ 'rtp_rtcp/source/rtp_packet_unittest.cc',
'rtp_rtcp/source/rtp_payload_registry_unittest.cc',
'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc',
'rtp_rtcp/source/rtp_header_extension_unittest.cc',
@@ -361,9 +362,8 @@
'video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc',
'video_coding/codecs/vp8/simulcast_unittest.cc',
'video_coding/codecs/vp8/simulcast_unittest.h',
- 'video_coding/codecs/vp9/screenshare_layers_unittest.cc',
+ 'video_coding/frame_buffer2_unittest.cc',
'video_coding/include/mock/mock_vcm_callbacks.h',
- 'video_coding/bitrate_adjuster_unittest.cc',
'video_coding/decoding_state_unittest.cc',
'video_coding/histogram_unittest.cc',
'video_coding/jitter_buffer_unittest.cc',
@@ -379,19 +379,21 @@
'video_coding/video_coding_robustness_unittest.cc',
'video_coding/video_receiver_unittest.cc',
'video_coding/video_sender_unittest.cc',
- 'video_coding/qm_select_unittest.cc',
'video_coding/test/stream_generator.cc',
'video_coding/test/stream_generator.h',
'video_coding/utility/frame_dropper_unittest.cc',
+ 'video_coding/utility/ivf_file_writer_unittest.cc',
'video_coding/utility/quality_scaler_unittest.cc',
- 'video_processing/test/brightness_detection_test.cc',
- 'video_processing/test/content_metrics_test.cc',
- 'video_processing/test/deflickering_test.cc',
'video_processing/test/denoiser_test.cc',
'video_processing/test/video_processing_unittest.cc',
'video_processing/test/video_processing_unittest.h',
],
'conditions': [
+ ['libvpx_build_vp9==1', {
+ 'sources': [
+ 'video_coding/codecs/vp9/screenshare_layers_unittest.cc',
+ ],
+ }],
['enable_bwe_test_logging==1', {
'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1' ],
}, {
@@ -444,6 +446,7 @@
'sources': [
'audio_processing/audio_processing_impl_locking_unittest.cc',
'audio_processing/audio_processing_impl_unittest.cc',
+ 'audio_processing/audio_processing_unittest.cc',
'audio_processing/echo_control_mobile_unittest.cc',
'audio_processing/echo_cancellation_unittest.cc',
'audio_processing/gain_control_unittest.cc',
@@ -453,7 +456,6 @@
'audio_processing/voice_detection_unittest.cc',
'audio_processing/test/audio_buffer_tools.cc',
'audio_processing/test/audio_buffer_tools.h',
- 'audio_processing/test/audio_processing_unittest.cc',
'audio_processing/test/bitexactness_tools.cc',
'audio_processing/test/bitexactness_tools.h',
'audio_processing/test/debug_dump_replayer.cc',
@@ -504,16 +506,6 @@
'<(DEPTH)/data/voice_engine/audio_tiny48.wav',
'<(DEPTH)/resources/att-downlink.rx',
'<(DEPTH)/resources/att-uplink.rx',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_64.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
'<(DEPTH)/resources/audio_coding/neteq_opus.rtp',
'<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
'<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
@@ -626,10 +618,17 @@
['OS=="android"', {
'targets': [
{
- 'target_name': 'modules_unittests_apk_target',
+ 'target_name': 'audio_codec_speed_tests_apk_target',
'type': 'none',
'dependencies': [
- '<(apk_tests_path):modules_unittests_apk',
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ },
+ {
+ 'target_name': 'audio_decoder_unittests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_decoder_unittests_apk',
],
},
{
@@ -639,8 +638,75 @@
'<(apk_tests_path):modules_tests_apk',
],
},
+ {
+ 'target_name': 'modules_unittests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_unittests_apk',
+ ],
+ },
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'audio_codec_speed_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'audio_decoder_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_decoder_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'audio_decoder_unittests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'modules_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_tests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'modules_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'modules_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'modules_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
@@ -708,19 +774,6 @@
'modules_unittests.isolate',
],
},
- {
- 'target_name': 'video_render_tests_run',
- 'type': 'none',
- 'dependencies': [
- 'video_render_tests',
- ],
- 'includes': [
- '../build/isolate.gypi',
- ],
- 'sources': [
- 'video_render_tests.isolate',
- ],
- },
],
}],
],
diff --git a/chromium/third_party/webrtc/modules/modules_java.gyp b/chromium/third_party/webrtc/modules/modules_java.gyp
index 060de2a0678..2a72fb30a4c 100644
--- a/chromium/third_party/webrtc/modules/modules_java.gyp
+++ b/chromium/third_party/webrtc/modules/modules_java.gyp
@@ -18,14 +18,5 @@
'includes': [ '../../build/java.gypi' ],
}, # audio_device_module_java
- {
- 'target_name': 'video_render_module_java',
- 'type': 'none',
- 'variables': {
- 'java_in_dir': 'video_render/android/java',
- 'additional_src_dirs': [ '../base/java/src', ],
- },
- 'includes': [ '../../build/java.gypi' ],
- }, # video_render_module_java
],
}
diff --git a/chromium/third_party/webrtc/modules/modules_java_chromium.gyp b/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
index 32d2d8d24e8..ebc53d60ff8 100644
--- a/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
+++ b/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
@@ -16,13 +16,5 @@
},
'includes': [ '../../../build/java.gypi' ],
}, # audio_device_module_java
- {
- 'target_name': 'video_render_module_java',
- 'type': 'none',
- 'variables': {
- 'java_in_dir': 'video_render/android/java',
- },
- 'includes': [ '../../../build/java.gypi' ],
- }, # video_render_module_java
],
}
diff --git a/chromium/third_party/webrtc/modules/modules_tests_apk.isolate b/chromium/third_party/webrtc/modules/modules_tests_apk.isolate
new file mode 100644
index 00000000000..ffdd967b4fc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/modules_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'modules_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_modules_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/modules_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_modules_tests',
+ 'modules_tests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/modules_unittests.isolate b/chromium/third_party/webrtc/modules/modules_unittests.isolate
index a7fc4e8661f..af7e6ef46e8 100644
--- a/chromium/third_party/webrtc/modules/modules_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_unittests.isolate
@@ -11,9 +11,6 @@
'variables': {
'files': [
'<(DEPTH)/data/audio_processing/output_data_fixed.pb',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats_android.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats_android.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_android.pcm',
],
},
}],
@@ -25,16 +22,6 @@
'<(DEPTH)/data/voice_engine/audio_tiny48.wav',
'<(DEPTH)/resources/att-downlink.rx',
'<(DEPTH)/resources/att-uplink.rx',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_64.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
'<(DEPTH)/resources/audio_coding/neteq_opus.rtp',
'<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
'<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
diff --git a/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate b/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate
new file mode 100644
index 00000000000..cd60cf75c86
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'modules_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_modules_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/modules_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_modules_unittests',
+ 'modules_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
index fbd9b817419..8e8e36ea34b 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
@@ -15,13 +15,14 @@
#include <limits>
#include <sstream>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/pacing/paced_sender.h"
namespace webrtc {
namespace {
-int ComputeDeltaFromBitrate(size_t packet_size, int bitrate_bps) {
+int ComputeDeltaFromBitrate(size_t packet_size, uint32_t bitrate_bps) {
assert(bitrate_bps > 0);
// Compute the time delta needed to send packet_size bytes at bitrate_bps
// bps. Result is in milliseconds.
@@ -33,8 +34,8 @@ int ComputeDeltaFromBitrate(size_t packet_size, int bitrate_bps) {
BitrateProber::BitrateProber()
: probing_state_(kDisabled),
packet_size_last_send_(0),
- time_last_send_ms_(-1) {
-}
+ time_last_send_ms_(-1),
+ next_cluster_id_(0) {}
void BitrateProber::SetEnabled(bool enable) {
if (enable) {
@@ -52,7 +53,7 @@ bool BitrateProber::IsProbing() const {
return probing_state_ == kProbing;
}
-void BitrateProber::OnIncomingPacket(int bitrate_bps,
+void BitrateProber::OnIncomingPacket(uint32_t bitrate_bps,
size_t packet_size,
int64_t now_ms) {
// Don't initialize probing unless we have something large enough to start
@@ -61,24 +62,24 @@ void BitrateProber::OnIncomingPacket(int bitrate_bps,
return;
if (probing_state_ != kAllowedToProbe)
return;
- probe_bitrates_.clear();
// Max number of packets used for probing.
const int kMaxNumProbes = 2;
const int kPacketsPerProbe = 5;
const float kProbeBitrateMultipliers[kMaxNumProbes] = {3, 6};
- int bitrates_bps[kMaxNumProbes];
std::stringstream bitrate_log;
- bitrate_log << "Start probing for bandwidth, bitrates:";
+ bitrate_log << "Start probing for bandwidth, (bitrate:packets): ";
for (int i = 0; i < kMaxNumProbes; ++i) {
- bitrates_bps[i] = kProbeBitrateMultipliers[i] * bitrate_bps;
- bitrate_log << " " << bitrates_bps[i];
- // We need one extra to get 5 deltas for the first probe.
- if (i == 0)
- probe_bitrates_.push_back(bitrates_bps[i]);
- for (int j = 0; j < kPacketsPerProbe; ++j)
- probe_bitrates_.push_back(bitrates_bps[i]);
+ ProbeCluster cluster;
+ // We need one extra to get 5 deltas for the first probe, therefore (i == 0)
+ cluster.max_probe_packets = kPacketsPerProbe + (i == 0 ? 1 : 0);
+ cluster.probe_bitrate_bps = kProbeBitrateMultipliers[i] * bitrate_bps;
+ cluster.id = next_cluster_id_++;
+
+ bitrate_log << "(" << cluster.probe_bitrate_bps << ":"
+ << cluster.max_probe_packets << ") ";
+
+ clusters_.push(cluster);
}
- bitrate_log << ", num packets: " << probe_bitrates_.size();
LOG(LS_INFO) << bitrate_log.str().c_str();
// Set last send time to current time so TimeUntilNextProbe doesn't short
// circuit due to inactivity.
@@ -87,10 +88,11 @@ void BitrateProber::OnIncomingPacket(int bitrate_bps,
}
int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
- if (probing_state_ != kDisabled && probe_bitrates_.empty()) {
+ if (probing_state_ != kDisabled && clusters_.empty()) {
probing_state_ = kWait;
}
- if (probe_bitrates_.empty() || time_last_send_ms_ == -1) {
+
+ if (clusters_.empty() || time_last_send_ms_ == -1) {
// No probe started, probe finished, or too long since last probe packet.
return -1;
}
@@ -107,8 +109,8 @@ int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
// sent before.
int time_until_probe_ms = 0;
if (packet_size_last_send_ != 0 && probing_state_ == kProbing) {
- int next_delta_ms = ComputeDeltaFromBitrate(packet_size_last_send_,
- probe_bitrates_.front());
+ int next_delta_ms = ComputeDeltaFromBitrate(
+ packet_size_last_send_, clusters_.front().probe_bitrate_bps);
time_until_probe_ms = next_delta_ms - elapsed_time_ms;
// There is no point in trying to probe with less than 1 ms between packets
// as it essentially means trying to probe at infinite bandwidth.
@@ -129,6 +131,12 @@ int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
return std::max(time_until_probe_ms, 0);
}
+int BitrateProber::CurrentClusterId() const {
+ RTC_DCHECK(!clusters_.empty());
+ RTC_DCHECK_EQ(kProbing, probing_state_);
+ return clusters_.front().id;
+}
+
size_t BitrateProber::RecommendedPacketSize() const {
return packet_size_last_send_;
}
@@ -141,7 +149,11 @@ void BitrateProber::PacketSent(int64_t now_ms, size_t packet_size) {
time_last_send_ms_ = now_ms;
if (probing_state_ != kProbing)
return;
- if (!probe_bitrates_.empty())
- probe_bitrates_.pop_front();
+ if (!clusters_.empty()) {
+ ProbeCluster* cluster = &clusters_.front();
+ ++cluster->sent_probe_packets;
+ if (cluster->sent_probe_packets == cluster->max_probe_packets)
+ clusters_.pop();
+ }
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
index 84fbc522fc9..e8967abde45 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
@@ -13,6 +13,7 @@
#include <cstddef>
#include <list>
+#include <queue>
#include "webrtc/typedefs.h"
@@ -34,12 +35,17 @@ class BitrateProber {
// Initializes a new probing session if the prober is allowed to probe. Does
// not initialize the prober unless the packet size is large enough to probe
// with.
- void OnIncomingPacket(int bitrate_bps, size_t packet_size, int64_t now_ms);
+ void OnIncomingPacket(uint32_t bitrate_bps,
+ size_t packet_size,
+ int64_t now_ms);
// Returns the number of milliseconds until the next packet should be sent to
// get accurate probing.
int TimeUntilNextProbe(int64_t now_ms);
+ // Which cluster that is currently being used for probing.
+ int CurrentClusterId() const;
+
// Returns the number of bytes that the prober recommends for the next probe
// packet.
size_t RecommendedPacketSize() const;
@@ -51,13 +57,21 @@ class BitrateProber {
private:
enum ProbingState { kDisabled, kAllowedToProbe, kProbing, kWait };
+ struct ProbeCluster {
+ int max_probe_packets = 0;
+ int sent_probe_packets = 0;
+ int probe_bitrate_bps = 0;
+ int id = -1;
+ };
+
ProbingState probing_state_;
// Probe bitrate per packet. These are used to compute the delta relative to
// the previous probe packet based on the size and time when that packet was
// sent.
- std::list<int> probe_bitrates_;
+ std::queue<ProbeCluster> clusters_;
size_t packet_size_last_send_;
int64_t time_last_send_ms_;
+ int next_cluster_id_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_PACING_BITRATE_PROBER_H_
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc b/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
index 59ee479973d..9e38220e012 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
@@ -26,6 +26,7 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
prober.OnIncomingPacket(300000, 1000, now_ms);
EXPECT_TRUE(prober.IsProbing());
+ EXPECT_EQ(0, prober.CurrentClusterId());
// First packet should probe as soon as possible.
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
@@ -37,12 +38,14 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
EXPECT_EQ(4, prober.TimeUntilNextProbe(now_ms));
now_ms += 4;
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
+ EXPECT_EQ(0, prober.CurrentClusterId());
prober.PacketSent(now_ms, 1000);
}
for (int i = 0; i < 5; ++i) {
EXPECT_EQ(4, prober.TimeUntilNextProbe(now_ms));
now_ms += 4;
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
+ EXPECT_EQ(1, prober.CurrentClusterId());
prober.PacketSent(now_ms, 1000);
}
diff --git a/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h b/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
index c710dbcbea5..bd7d7aaa499 100644
--- a/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
@@ -22,15 +22,17 @@ namespace webrtc {
class MockPacedSender : public PacedSender {
public:
- MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), NULL, 0, 0, 0) {}
+ MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), nullptr) {}
MOCK_METHOD6(SendPacket, bool(Priority priority,
uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
size_t bytes,
bool retransmission));
+ MOCK_METHOD1(SetEstimatedBitrate, void(uint32_t));
MOCK_CONST_METHOD0(QueueInMs, int64_t());
MOCK_CONST_METHOD0(QueueInPackets, int());
+ MOCK_CONST_METHOD0(ExpectedQueueTimeMs, int64_t());
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
index b56d28510f2..167be23ab62 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
@@ -246,20 +246,18 @@ const int64_t PacedSender::kMaxQueueLengthMs = 2000;
const float PacedSender::kDefaultPaceMultiplier = 2.5f;
PacedSender::PacedSender(Clock* clock,
- Callback* callback,
- int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps)
+ PacketSender* packet_sender)
: clock_(clock),
- callback_(callback),
+ packet_sender_(packet_sender),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
paused_(false),
probing_enabled_(true),
- media_budget_(new paced_sender::IntervalBudget(max_bitrate_kbps)),
- padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
+ media_budget_(new paced_sender::IntervalBudget(0)),
+ padding_budget_(new paced_sender::IntervalBudget(0)),
prober_(new BitrateProber()),
- bitrate_bps_(1000 * bitrate_kbps),
- max_bitrate_kbps_(max_bitrate_kbps),
+ estimated_bitrate_bps_(0),
+ min_send_bitrate_kbps_(0u),
+ pacing_bitrate_kbps_(0),
time_last_update_us_(clock->TimeInMicroseconds()),
packets_(new paced_sender::PacketQueue(clock)),
packet_counter_(0) {
@@ -283,16 +281,22 @@ void PacedSender::SetProbingEnabled(bool enabled) {
probing_enabled_ = enabled;
}
-void PacedSender::UpdateBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps) {
+void PacedSender::SetEstimatedBitrate(uint32_t bitrate_bps) {
CriticalSectionScoped cs(critsect_.get());
- // Don't set media bitrate here as it may be boosted in order to meet max
- // queue time constraint. Just update max_bitrate_kbps_ and let media_budget_
- // be updated in Process().
- padding_budget_->set_target_rate_kbps(min_bitrate_kbps);
- bitrate_bps_ = 1000 * bitrate_kbps;
- max_bitrate_kbps_ = max_bitrate_kbps;
+ estimated_bitrate_bps_ = bitrate_bps;
+ pacing_bitrate_kbps_ =
+ std::max(min_send_bitrate_kbps_, estimated_bitrate_bps_ / 1000) *
+ kDefaultPaceMultiplier;
+}
+
+void PacedSender::SetAllocatedSendBitrate(int allocated_bitrate,
+ int padding_bitrate) {
+ CriticalSectionScoped cs(critsect_.get());
+ min_send_bitrate_kbps_ = allocated_bitrate / 1000;
+ pacing_bitrate_kbps_ =
+ std::max(min_send_bitrate_kbps_, estimated_bitrate_bps_ / 1000) *
+ kDefaultPaceMultiplier;
+ padding_budget_->set_target_rate_kbps(padding_bitrate / 1000);
}
void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
@@ -302,11 +306,13 @@ void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
size_t bytes,
bool retransmission) {
CriticalSectionScoped cs(critsect_.get());
+ RTC_DCHECK(estimated_bitrate_bps_ > 0)
+ << "SetEstimatedBitrate must be called before InsertPacket.";
if (probing_enabled_ && !prober_->IsProbing())
prober_->SetEnabled(true);
int64_t now_ms = clock_->TimeInMilliseconds();
- prober_->OnIncomingPacket(bitrate_bps_, bytes, now_ms);
+ prober_->OnIncomingPacket(estimated_bitrate_bps_, bytes, now_ms);
if (capture_time_ms < 0)
capture_time_ms = now_ms;
@@ -318,8 +324,9 @@ void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
int64_t PacedSender::ExpectedQueueTimeMs() const {
CriticalSectionScoped cs(critsect_.get());
- RTC_DCHECK_GT(max_bitrate_kbps_, 0);
- return static_cast<int64_t>(packets_->SizeInBytes() * 8 / max_bitrate_kbps_);
+ RTC_DCHECK_GT(pacing_bitrate_kbps_, 0u);
+ return static_cast<int64_t>(packets_->SizeInBytes() * 8 /
+ pacing_bitrate_kbps_);
}
size_t PacedSender::QueueSizePackets() const {
@@ -360,7 +367,7 @@ void PacedSender::Process() {
CriticalSectionScoped cs(critsect_.get());
int64_t elapsed_time_ms = (now_us - time_last_update_us_ + 500) / 1000;
time_last_update_us_ = now_us;
- int target_bitrate_kbps = max_bitrate_kbps_;
+ int target_bitrate_kbps = pacing_bitrate_kbps_;
// TODO(holmer): Remove the !paused_ check when issue 5307 has been fixed.
if (!paused_ && elapsed_time_ms > 0) {
size_t queue_size_bytes = packets_->SizeInBytes();
@@ -390,8 +397,10 @@ void PacedSender::Process() {
// element from the priority queue but keep it in storage, so that we can
// reinsert it if send fails.
const paced_sender::Packet& packet = packets_->BeginPop();
+ int probe_cluster_id =
+ prober_->IsProbing() ? prober_->CurrentClusterId() : -1;
- if (SendPacket(packet)) {
+ if (SendPacket(packet, probe_cluster_id)) {
// Send succeeded, remove it from the queue.
packets_->FinalizePop(packet);
if (prober_->IsProbing())
@@ -418,17 +427,17 @@ void PacedSender::Process() {
SendPadding(static_cast<size_t>(padding_needed));
}
-bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
+bool PacedSender::SendPacket(const paced_sender::Packet& packet,
+ int probe_cluster_id) {
// TODO(holmer): Because of this bug issue 5307 we have to send audio
// packets even when the pacer is paused. Here we assume audio packets are
// always high priority and that they are the only high priority packets.
if (paused_ && packet.priority != kHighPriority)
return false;
critsect_->Leave();
- const bool success = callback_->TimeToSendPacket(packet.ssrc,
- packet.sequence_number,
- packet.capture_time_ms,
- packet.retransmission);
+ const bool success = packet_sender_->TimeToSendPacket(
+ packet.ssrc, packet.sequence_number, packet.capture_time_ms,
+ packet.retransmission, probe_cluster_id);
critsect_->Enter();
if (success) {
@@ -447,7 +456,7 @@ bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
void PacedSender::SendPadding(size_t padding_needed) {
critsect_->Leave();
- size_t bytes_sent = callback_->TimeToSendPadding(padding_needed);
+ size_t bytes_sent = packet_sender_->TimeToSendPadding(padding_needed);
critsect_->Enter();
if (bytes_sent > 0) {
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.h b/chromium/third_party/webrtc/modules/pacing/paced_sender.h
index 16569b04045..d42b9b38489 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.h
@@ -33,7 +33,7 @@ class PacketQueue;
class PacedSender : public Module, public RtpPacketSender {
public:
- class Callback {
+ class PacketSender {
public:
// Note: packets sent as a result of a callback should not pass by this
// module again.
@@ -42,13 +42,14 @@ class PacedSender : public Module, public RtpPacketSender {
virtual bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) = 0;
+ bool retransmission,
+ int probe_cluster_id) = 0;
// Called when it's a good time to send a padding data.
// Returns the number of bytes sent.
virtual size_t TimeToSendPadding(size_t bytes) = 0;
protected:
- virtual ~Callback() {}
+ virtual ~PacketSender() {}
};
// Expected max pacer delay in ms. If ExpectedQueueTimeMs() is higher than
@@ -56,8 +57,6 @@ class PacedSender : public Module, public RtpPacketSender {
// encoding them). Bitrate sent may temporarily exceed target set by
// UpdateBitrate() so that this limit will be upheld.
static const int64_t kMaxQueueLengthMs;
- // Pace in kbits/s until we receive first estimate.
- static const int kDefaultInitialPaceKbps = 2000;
// Pacing-rate relative to our target send rate.
// Multiplicative factor that is applied to the target bitrate to calculate
// the number of bytes that can be transmitted per interval.
@@ -68,10 +67,7 @@ class PacedSender : public Module, public RtpPacketSender {
static const size_t kMinProbePacketSize = 200;
PacedSender(Clock* clock,
- Callback* callback,
- int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ PacketSender* packet_sender);
virtual ~PacedSender();
@@ -86,14 +82,20 @@ class PacedSender : public Module, public RtpPacketSender {
// effect.
void SetProbingEnabled(bool enabled);
- // Set target bitrates for the pacer.
- // We will pace out bursts of packets at a bitrate of |max_bitrate_kbps|.
- // |bitrate_kbps| is our estimate of what we are allowed to send on average.
- // Padding packets will be utilized to reach |min_bitrate| unless enough media
- // packets are available.
- void UpdateBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ // Sets the estimated capacity of the network. Must be called once before
+ // packets can be sent.
+ // |bitrate_bps| is our estimate of what we are allowed to send on average.
+ // We will pace out bursts of packets at a bitrate of
+ // |bitrate_bps| * kDefaultPaceMultiplier.
+ virtual void SetEstimatedBitrate(uint32_t bitrate_bps);
+
+ // Sets the bitrate that has been allocated for encoders.
+ // |allocated_bitrate| might be higher that the estimated available network
+ // bitrate and if so, the pacer will send with |allocated_bitrate|.
+ // Padding packets will be utilized to reach |padding_bitrate| unless enough
+ // media packets are available.
+ void SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps);
// Returns true if we send the packet now, else it will add the packet
// information to the queue and call TimeToSendPacket when it's time to send.
@@ -129,12 +131,12 @@ class PacedSender : public Module, public RtpPacketSender {
void UpdateBytesPerInterval(int64_t delta_time_in_ms)
EXCLUSIVE_LOCKS_REQUIRED(critsect_);
- bool SendPacket(const paced_sender::Packet& packet)
+ bool SendPacket(const paced_sender::Packet& packet, int probe_cluster_id)
EXCLUSIVE_LOCKS_REQUIRED(critsect_);
void SendPadding(size_t padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
Clock* const clock_;
- Callback* const callback_;
+ PacketSender* const packet_sender_;
std::unique_ptr<CriticalSectionWrapper> critsect_;
bool paused_ GUARDED_BY(critsect_);
@@ -152,8 +154,9 @@ class PacedSender : public Module, public RtpPacketSender {
std::unique_ptr<BitrateProber> prober_ GUARDED_BY(critsect_);
// Actual configured bitrates (media_budget_ may temporarily be higher in
// order to meet pace time constraint).
- int bitrate_bps_ GUARDED_BY(critsect_);
- int max_bitrate_kbps_ GUARDED_BY(critsect_);
+ uint32_t estimated_bitrate_bps_ GUARDED_BY(critsect_);
+ uint32_t min_send_bitrate_kbps_ GUARDED_BY(critsect_);
+ uint32_t pacing_bitrate_kbps_ GUARDED_BY(critsect_);
int64_t time_last_update_us_ GUARDED_BY(critsect_);
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
index 941c81335b6..6a0a006c326 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -22,32 +22,33 @@ using testing::Return;
namespace webrtc {
namespace test {
-static const int kTargetBitrate = 800;
-static const float kPaceMultiplier = 1.5f;
+static const int kTargetBitrateBps = 800000;
-class MockPacedSenderCallback : public PacedSender::Callback {
+class MockPacedSenderCallback : public PacedSender::PacketSender {
public:
- MOCK_METHOD4(TimeToSendPacket,
+ MOCK_METHOD5(TimeToSendPacket,
bool(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission));
+ bool retransmission,
+ int probe_cluster_id));
MOCK_METHOD1(TimeToSendPadding,
size_t(size_t bytes));
};
-class PacedSenderPadding : public PacedSender::Callback {
+class PacedSenderPadding : public PacedSender::PacketSender {
public:
PacedSenderPadding() : padding_sent_(0) {}
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) override {
return true;
}
- size_t TimeToSendPadding(size_t bytes) {
+ size_t TimeToSendPadding(size_t bytes) override {
const size_t kPaddingPacketSize = 224;
size_t num_packets = (bytes + kPaddingPacketSize - 1) / kPaddingPacketSize;
padding_sent_ += kPaddingPacketSize * num_packets;
@@ -60,7 +61,7 @@ class PacedSenderPadding : public PacedSender::Callback {
size_t padding_sent_;
};
-class PacedSenderProbing : public PacedSender::Callback {
+class PacedSenderProbing : public PacedSender::PacketSender {
public:
PacedSenderProbing(const std::list<int>& expected_deltas, Clock* clock)
: prev_packet_time_ms_(-1),
@@ -71,12 +72,13 @@ class PacedSenderProbing : public PacedSender::Callback {
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) override {
ExpectAndCountPacket();
return true;
}
- size_t TimeToSendPadding(size_t bytes) {
+ size_t TimeToSendPadding(size_t bytes) override {
ExpectAndCountPacket();
return bytes;
}
@@ -108,15 +110,14 @@ class PacedSenderTest : public ::testing::Test {
PacedSenderTest() : clock_(123456) {
srand(0);
// Need to initialize PacedSender after we initialize clock.
- send_bucket_.reset(new PacedSender(&clock_,
- &callback_,
- kTargetBitrate,
- kPaceMultiplier * kTargetBitrate,
- 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback_));
// Default to bitrate probing disabled for testing purposes. Probing tests
// have to enable probing, either by creating a new PacedSender instance or
// by calling SetProbingEnabled(true).
send_bucket_->SetProbingEnabled(false);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+
+ clock_.AdvanceTimeMilliseconds(send_bucket_->TimeUntilNextProcess());
}
void SendAndExpectPacket(PacedSender::Priority priority,
@@ -127,8 +128,8 @@ class PacedSenderTest : public ::testing::Test {
bool retransmission) {
send_bucket_->InsertPacket(priority, ssrc, sequence_number, capture_time_ms,
size, retransmission);
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillRepeatedly(Return(true));
}
@@ -141,29 +142,21 @@ class PacedSenderTest : public ::testing::Test {
TEST_F(PacedSenderTest, QueuePacket) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
+
int64_t queued_packet_timestamp = clock_.TimeInMilliseconds();
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, queued_packet_timestamp, 250,
false);
+ EXPECT_EQ(packets_to_send + 1, send_bucket_->QueueSizePackets());
send_bucket_->Process();
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
@@ -171,86 +164,79 @@ TEST_F(PacedSenderTest, QueuePacket) {
EXPECT_EQ(1, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(1);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number++, queued_packet_timestamp, false))
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number++,
+ queued_packet_timestamp, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Process();
sequence_number++;
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
+
+ // We can send packets_to_send -1 packets of size 250 during the current
+ // interval since one packet has already been sent.
+ for (size_t i = 0; i < packets_to_send - 1; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
+ EXPECT_EQ(packets_to_send, send_bucket_->QueueSizePackets());
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, PaceQueuedPackets) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- for (int i = 0; i < 3; ++i) {
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
}
- for (int j = 0; j < 30; ++j) {
+
+ for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
}
+ EXPECT_EQ(packets_to_send_per_interval + packets_to_send_per_interval * 10,
+ send_bucket_->QueueSizePackets());
send_bucket_->Process();
+ EXPECT_EQ(packets_to_send_per_interval * 10,
+ send_bucket_->QueueSizePackets());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
for (int k = 0; k < 10; ++k) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, _, false))
- .Times(3)
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, _, false, _))
+ .Times(packets_to_send_per_interval)
.WillRepeatedly(Return(true));
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
}
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
send_bucket_->Process();
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, clock_.TimeInMilliseconds(), 250,
false);
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
@@ -258,18 +244,18 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
uint16_t sequence_number = 1234;
uint16_t queued_sequence_number;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- for (int i = 0; i < 3; ++i) {
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
}
queued_sequence_number = sequence_number;
- for (int j = 0; j < 30; ++j) {
+ for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) {
// Send in duplicate packets.
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, clock_.TimeInMilliseconds(),
@@ -284,9 +270,9 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
- for (int i = 0; i < 3; ++i) {
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, queued_sequence_number++, _, false))
+ TimeToSendPacket(ssrc, queued_sequence_number++, _, false, _))
.Times(1)
.WillRepeatedly(Return(true));
}
@@ -297,28 +283,16 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, CanQueuePacketsWithSameSequenceNumberOnDifferentSsrcs) {
@@ -348,29 +322,27 @@ TEST_F(PacedSenderTest, Padding) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
// No padding is expected since we have sent too much already.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
+ EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
+ send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
+
+ // 5 milliseconds later should not send padding since we filled the buffers
+ // initially.
+ EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
@@ -391,8 +363,9 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
int64_t capture_time_ms = 56789;
const int kTimeStep = 5;
const int64_t kBitrateWindow = 100;
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
int64_t start_time = clock_.TimeInMilliseconds();
while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
SendAndExpectPacket(PacedSender::kNormalPriority,
@@ -401,10 +374,10 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
capture_time_ms,
250,
false);
- clock_.AdvanceTimeMilliseconds(kTimeStep);
EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(1).
WillOnce(Return(250));
send_bucket_->Process();
+ clock_.AdvanceTimeMilliseconds(kTimeStep);
}
}
@@ -415,11 +388,11 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
const int kTimeStep = 5;
const int64_t kBitrateWindow = 10000;
PacedSenderPadding callback;
- send_bucket_.reset(new PacedSender(
- &clock_, &callback, kTargetBitrate, kPaceMultiplier * kTargetBitrate, 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
send_bucket_->SetProbingEnabled(false);
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
int64_t start_time = clock_.TimeInMilliseconds();
size_t media_bytes = 0;
while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
@@ -432,9 +405,10 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
clock_.AdvanceTimeMilliseconds(kTimeStep);
send_bucket_->Process();
}
- EXPECT_NEAR(kTargetBitrate,
+ EXPECT_NEAR(kTargetBitrateBps / 1000,
static_cast<int>(8 * (media_bytes + callback.padding_sent()) /
- kBitrateWindow), 1);
+ kBitrateWindow),
+ 1);
}
TEST_F(PacedSenderTest, Priority) {
@@ -444,54 +418,45 @@ TEST_F(PacedSenderTest, Priority) {
int64_t capture_time_ms = 56789;
int64_t capture_time_ms_low_priority = 1234567;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kLowPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval: network capacity * multiplier / (8 bits per byte *
+ // packet size * #send intervals per second).
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
// Expect normal and low priority to be queued and high to pass through.
send_bucket_->InsertPacket(PacedSender::kLowPriority, ssrc_low_priority,
sequence_number++, capture_time_ms_low_priority,
250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number++, capture_time_ms, 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kHighPriority, ssrc,
sequence_number++, capture_time_ms, 250, false);
// Expect all high and normal priority to be sent out first.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false))
- .Times(4)
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false, _))
+ .Times(packets_to_send_per_interval + 1)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
EXPECT_CALL(callback_,
- TimeToSendPacket(
- ssrc_low_priority, _, capture_time_ms_low_priority, false))
+ TimeToSendPacket(ssrc_low_priority, _,
+ capture_time_ms_low_priority, false, _))
.Times(1)
.WillRepeatedly(Return(true));
@@ -513,23 +478,30 @@ TEST_F(PacedSenderTest, HighPrioDoesntAffectBudget) {
capture_time_ms, 250, false);
}
send_bucket_->Process();
- // Low prio packets does affect the budget, so we should only be able to send
- // 3 at once, the 4th should be queued.
- for (int i = 0; i < 3; ++i) {
+ // Low prio packets do affect the budget.
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval: network capacity * multiplier / (8 bits per byte *
+ // packet size * #send intervals per second).
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
SendAndExpectPacket(PacedSender::kLowPriority, ssrc, sequence_number++,
- capture_time_ms, 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
}
send_bucket_->InsertPacket(PacedSender::kLowPriority, ssrc, sequence_number,
capture_time_ms, 250, false);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
send_bucket_->Process();
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number++, capture_time_ms, false))
- .Times(1);
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number++,
+ capture_time_ms, false, _))
+ .Times(1)
+ .WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, Pause) {
@@ -540,25 +512,16 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_EQ(0, send_bucket_->QueueInMs());
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kLowPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval: network capacity * multiplier / (8 bits per byte *
+ // packet size * #send intervals per second).
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
+
send_bucket_->Process();
send_bucket_->Pause();
@@ -583,7 +546,7 @@ TEST_F(PacedSenderTest, Pause) {
// Expect no packet to come out while paused.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _)).Times(0);
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, _)).Times(0);
for (int i = 0; i < 10; ++i) {
clock_.AdvanceTimeMilliseconds(5);
@@ -592,10 +555,11 @@ TEST_F(PacedSenderTest, Pause) {
}
// Expect high prio packets to come out first followed by all packets in the
// way they were added.
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false, _))
.Times(3)
.WillRepeatedly(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(_, _, second_capture_time_ms, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Resume();
@@ -624,8 +588,8 @@ TEST_F(PacedSenderTest, ResendPacket) {
EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms,
send_bucket_->QueueInMs());
// Fails to send first packet so only one call.
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillOnce(Return(false));
clock_.AdvanceTimeMilliseconds(10000);
@@ -636,13 +600,12 @@ TEST_F(PacedSenderTest, ResendPacket) {
send_bucket_->QueueInMs());
// Fails to send second packet.
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillOnce(Return(true));
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number + 1, capture_time_ms + 1, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
+ capture_time_ms + 1, false, _))
.Times(1)
.WillOnce(Return(false));
clock_.AdvanceTimeMilliseconds(10000);
@@ -653,9 +616,8 @@ TEST_F(PacedSenderTest, ResendPacket) {
send_bucket_->QueueInMs());
// Send second packet and queue becomes empty.
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number + 1, capture_time_ms + 1, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
+ capture_time_ms + 1, false, _))
.Times(1)
.WillOnce(Return(true));
clock_.AdvanceTimeMilliseconds(10000);
@@ -668,18 +630,18 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
uint16_t sequence_number = 1234;
const size_t kNumPackets = 60;
const size_t kPacketSize = 1200;
- const int32_t kMaxBitrate = kPaceMultiplier * 30;
+ const int32_t kMaxBitrate = PacedSender::kDefaultPaceMultiplier * 30000;
EXPECT_EQ(0, send_bucket_->ExpectedQueueTimeMs());
- send_bucket_->UpdateBitrate(30, kMaxBitrate, 0);
+ send_bucket_->SetEstimatedBitrate(30000);
for (size_t i = 0; i < kNumPackets; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize, false);
}
- // Queue in ms = 1000 * (bytes in queue) / (kbit per second * 1000 / 8)
+ // Queue in ms = 1000 * (bytes in queue) * 8 / (bits per second)
int64_t queue_in_ms =
- static_cast<int64_t>(kNumPackets * kPacketSize * 8 / kMaxBitrate);
+ static_cast<int64_t>(1000 * kNumPackets * kPacketSize * 8 / kMaxBitrate);
EXPECT_EQ(queue_in_ms, send_bucket_->ExpectedQueueTimeMs());
int64_t time_start = clock_.TimeInMilliseconds();
@@ -697,7 +659,7 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
// Allow for aliasing, duration should be within one pack of max time limit.
EXPECT_NEAR(duration, PacedSender::kMaxQueueLengthMs,
- static_cast<int64_t>(kPacketSize * 8 / kMaxBitrate));
+ static_cast<int64_t>(1000 * kPacketSize * 8 / kMaxBitrate));
}
TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
@@ -705,7 +667,7 @@ TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, kPaceMultiplier * 30, 0);
+ send_bucket_->SetEstimatedBitrate(30000);
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number,
@@ -723,25 +685,23 @@ TEST_F(PacedSenderTest, ProbingWithInitialFrame) {
const int kNumPackets = 11;
const int kNumDeltas = kNumPackets - 1;
const size_t kPacketSize = 1200;
- const int kInitialBitrateKbps = 300;
+ const int kInitialBitrateBps = 300000;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
+
const int expected_deltas[kNumDeltas] = {10, 10, 10, 10, 10, 5, 5, 5, 5, 5};
std::list<int> expected_deltas_list(expected_deltas,
expected_deltas + kNumDeltas);
PacedSenderProbing callback(expected_deltas_list, &clock_);
- send_bucket_.reset(
- new PacedSender(&clock_,
- &callback,
- kInitialBitrateKbps,
- kPaceMultiplier * kInitialBitrateKbps,
- 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
+ send_bucket_->SetEstimatedBitrate(kInitialBitrateBps);
for (int i = 0; i < kNumPackets; ++i) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
kPacketSize, false);
}
+
while (callback.packets_sent() < kNumPackets) {
int time_until_process = send_bucket_->TimeUntilNextProcess();
if (time_until_process <= 0) {
@@ -756,15 +716,15 @@ TEST_F(PacedSenderTest, ProbingWithTooSmallInitialFrame) {
const int kNumPackets = 11;
const int kNumDeltas = kNumPackets - 1;
const size_t kPacketSize = 1200;
- const int kInitialBitrateKbps = 300;
+ const int kInitialBitrateBps = 300000;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
const int expected_deltas[kNumDeltas] = {10, 10, 10, 10, 10, 5, 5, 5, 5, 5};
std::list<int> expected_deltas_list(expected_deltas,
expected_deltas + kNumDeltas);
PacedSenderProbing callback(expected_deltas_list, &clock_);
- send_bucket_.reset(new PacedSender(&clock_, &callback, kInitialBitrateKbps,
- kPaceMultiplier * kInitialBitrateKbps, 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
+ send_bucket_->SetEstimatedBitrate(kInitialBitrateBps);
for (int i = 0; i < kNumPackets - 5; ++i) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
@@ -810,18 +770,22 @@ TEST_F(PacedSenderTest, PriorityInversion) {
// Packets from earlier frames should be sent first.
{
::testing::InSequence sequence;
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
- clock_.TimeInMilliseconds(), true))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number,
+ clock_.TimeInMilliseconds(), true, _))
+ .WillOnce(Return(true));
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 1,
+ clock_.TimeInMilliseconds(), true, _))
+ .WillOnce(Return(true));
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 3,
+ clock_.TimeInMilliseconds() + 33, true, _))
.WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
- clock_.TimeInMilliseconds(), true))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 2,
+ clock_.TimeInMilliseconds() + 33, true, _))
.WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 3,
- clock_.TimeInMilliseconds() + 33,
- true)).WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 2,
- clock_.TimeInMilliseconds() + 33,
- true)).WillOnce(Return(true));
while (send_bucket_->QueueSizePackets() > 0) {
int time_until_process = send_bucket_->TimeUntilNextProcess();
@@ -839,21 +803,22 @@ TEST_F(PacedSenderTest, PaddingOveruse) {
uint16_t sequence_number = 1234;
const size_t kPacketSize = 1200;
- // Min bitrate 0 => no padding, padding budget will stay at 0.
- send_bucket_->UpdateBitrate(60, 90, 0);
+ send_bucket_->Process();
+ send_bucket_->SetEstimatedBitrate(60000);
+ send_bucket_->SetAllocatedSendBitrate(60000, 0);
+
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize, false);
send_bucket_->Process();
// Add 30kbit padding. When increasing budget, media budget will increase from
- // negative (overuse) while padding budget will increase form 0.
+ // negative (overuse) while padding budget will increase from 0.
clock_.AdvanceTimeMilliseconds(5);
- send_bucket_->UpdateBitrate(60, 90, 30);
-
- send_bucket_->InsertPacket(PacedSender::kHighPriority, ssrc,
- sequence_number++, clock_.TimeInMilliseconds(),
- kPacketSize, false);
+ send_bucket_->SetAllocatedSendBitrate(60000, 30000);
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), kPacketSize, false);
+ EXPECT_LT(5u, send_bucket_->ExpectedQueueTimeMs());
// Don't send padding if queue is non-empty, even if padding budget > 0.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
send_bucket_->Process();
@@ -864,9 +829,8 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
uint16_t sequence_number = 1234;
const size_t kPacketSize = 1200;
const int kBitrateBps = 10 * kPacketSize * 8; // 10 packets per second.
- const int kBitrateKbps = (kBitrateBps + 500) / 1000;
- send_bucket_->UpdateBitrate(kBitrateKbps, kBitrateKbps, kBitrateKbps);
+ send_bucket_->SetEstimatedBitrate(kBitrateBps);
EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
@@ -885,7 +849,7 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
// Only first packet (queued for 20ms) should be removed, leave the second
// packet (queued for 10ms) alone in the queue.
EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
- first_capture_time, false))
+ first_capture_time, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Process();
@@ -894,7 +858,7 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
clock_.AdvanceTimeMilliseconds(10);
EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
- first_capture_time + 10, false))
+ first_capture_time + 10, false, _))
.Times(1)
.WillRepeatedly(Return(true));
for (int i = 0; i < 3; ++i) {
@@ -905,5 +869,37 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
}
+TEST_F(PacedSenderTest, ProbeClusterId) {
+ uint32_t ssrc = 12346;
+ uint16_t sequence_number = 1234;
+ const size_t kPacketSize = 1200;
+
+ send_bucket_->SetProbingEnabled(true);
+ for (int i = 0; i < 11; ++i) {
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number + i, clock_.TimeInMilliseconds(),
+ kPacketSize, false);
+ }
+
+ // First probing cluster.
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, 0))
+ .Times(6)
+ .WillRepeatedly(Return(true));
+ for (int i = 0; i < 6; ++i)
+ send_bucket_->Process();
+
+ // Second probing cluster.
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, 1))
+ .Times(5)
+ .WillRepeatedly(Return(true));
+ for (int i = 0; i < 5; ++i)
+ send_bucket_->Process();
+
+ // No more probing packets.
+ EXPECT_CALL(callback_, TimeToSendPadding(_))
+ .Times(1);
+ send_bucket_->Process();
+}
+
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.cc b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
index 5c7a7ab29a3..1884958aca4 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
@@ -43,7 +43,8 @@ void PacketRouter::RemoveRtpModule(RtpRtcp* rtp_module) {
bool PacketRouter::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_timestamp,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) {
RTC_DCHECK(pacer_thread_checker_.CalledOnValidThread());
rtc::CritScope cs(&modules_crit_);
for (auto* rtp_module : rtp_modules_) {
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.h b/chromium/third_party/webrtc/modules/pacing/packet_router.h
index 635b931225c..81d85404eee 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.h
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.h
@@ -30,7 +30,7 @@ class TransportFeedback;
// PacketRouter routes outgoing data to the correct sending RTP module, based
// on the simulcast layer in RTPVideoHeader.
-class PacketRouter : public PacedSender::Callback,
+class PacketRouter : public PacedSender::PacketSender,
public TransportSequenceNumberAllocator {
public:
PacketRouter();
@@ -43,7 +43,8 @@ class PacketRouter : public PacedSender::Callback,
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_timestamp,
- bool retransmission) override;
+ bool retransmission,
+ int probe_cluster_id) override;
size_t TimeToSendPadding(size_t bytes) override;
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
index faf270ced37..006b9f2bf48 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
@@ -53,7 +53,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
.WillOnce(Return(true));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// Send on the second module by letting rtp_2 be sending, but not rtp_1.
++sequence_number;
@@ -69,7 +69,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
.Times(1)
.WillOnce(Return(true));
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc2, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// No module is sending, hence no packet should be sent.
EXPECT_CALL(rtp_1, SendingMedia()).Times(1).WillOnce(Return(false));
@@ -77,7 +77,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_2, SendingMedia()).Times(1).WillOnce(Return(false));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// Add a packet with incorrect ssrc and test it's dropped in the router.
EXPECT_CALL(rtp_1, SendingMedia()).Times(1).WillOnce(Return(true));
@@ -87,7 +87,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_1, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1 + kSsrc2, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
packet_router_->RemoveRtpModule(&rtp_1);
@@ -97,7 +97,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_2, SSRC()).Times(1).WillOnce(Return(kSsrc2));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
packet_router_->RemoveRtpModule(&rtp_2);
}
@@ -167,7 +167,7 @@ TEST_F(PacketRouterTest, SenderOnlyFunctionsRespectSendingMedia) {
// Verify that TimeToSendPacket does not end up in a receiver.
EXPECT_CALL(rtp, TimeToSendPacket(_, _, _, _)).Times(0);
- EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc, 1, 1, false));
+ EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc, 1, 1, false, -1));
// Verify that TimeToSendPadding does not end up in a receiver.
EXPECT_CALL(rtp, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(0u, packet_router_->TimeToSendPadding(200));
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
index 4967913558c..d44788be274 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -11,6 +11,7 @@
#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet_receiver.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
index 2c35df872bc..61773037d74 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
@@ -28,7 +28,7 @@ class MockRemoteBitrateEstimator : public RemoteBitrateEstimator {
public:
MOCK_METHOD1(IncomingPacketFeedbackVector,
void(const std::vector<PacketInfo>&));
- MOCK_METHOD4(IncomingPacket, void(int64_t, size_t, const RTPHeader&, bool));
+ MOCK_METHOD3(IncomingPacket, void(int64_t, size_t, const RTPHeader&));
MOCK_METHOD1(RemoveStream, void(uint32_t));
MOCK_CONST_METHOD2(LatestEstimate, bool(std::vector<uint32_t>*, uint32_t*));
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index e56c273fc4b..d7d8d2c8d2e 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -58,8 +58,7 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
// Note that |arrival_time_ms| can be of an arbitrary time base.
virtual void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) = 0;
+ const RTPHeader& header) = 0;
// Removes all data for |ssrc|.
virtual void RemoveStream(uint32_t ssrc) = 0;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
index a643c1f1030..83e87f95712 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
@@ -24,7 +24,7 @@ class SendTimeHistory {
SendTimeHistory(Clock* clock, int64_t packet_age_limit);
virtual ~SendTimeHistory();
- void AddAndRemoveOld(uint16_t sequence_number, size_t length, bool was_paced);
+ void AddAndRemoveOld(uint16_t sequence_number, size_t length);
bool OnSentPacket(uint16_t sequence_number, int64_t timestamp);
// Look up PacketInfo for a sent packet, based on the sequence number, and
// populate all fields except for receive_time. The packet parameter must
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
index f75bc2b03ea..1b7ce07583d 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
@@ -18,7 +18,7 @@
namespace webrtc {
-static const int kBurstDeltaThresholdMs = 5;
+static const int kBurstDeltaThresholdMs = 5;
InterArrival::InterArrival(uint32_t timestamp_group_length_ticks,
double timestamp_to_ms_coeff,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
index 32663d729b8..7b20cf7f9db 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
@@ -62,6 +62,7 @@
'type': 'static_library',
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
],
'sources': [
'test/bwe.cc',
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index 7c2abb2f08f..82335055b43 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -14,12 +14,12 @@
#include <algorithm>
+#include "webrtc/base/checks.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -79,23 +79,19 @@ bool RemoteBitrateEstimatorAbsSendTime::IsWithinClusterBounds(
}
RemoteBitrateEstimatorAbsSendTime::RemoteBitrateEstimatorAbsSendTime(
- RemoteBitrateObserver* observer,
- Clock* clock)
+ RemoteBitrateObserver* observer)
: observer_(observer),
inter_arrival_(),
- estimator_(OverUseDetectorOptions()),
+ estimator_(),
detector_(OverUseDetectorOptions()),
incoming_bitrate_(kBitrateWindowMs, 8000),
total_probes_received_(0),
first_packet_time_ms_(-1),
last_update_ms_(-1),
- ssrcs_(),
- clock_(clock) {
+ ssrcs_() {
RTC_DCHECK(observer_);
- RTC_DCHECK(clock_);
LOG(LS_INFO) << "RemoteBitrateEstimatorAbsSendTime: Instantiating.";
network_thread_.DetachFromThread();
- process_thread_.DetachFromThread();
}
void RemoteBitrateEstimatorAbsSendTime::ComputeClusters(
@@ -180,8 +176,7 @@ RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) {
std::min(best_it->GetSendBitrateBps(), best_it->GetRecvBitrateBps());
// Make sure that a probe sent on a lower bitrate than our estimate can't
// reduce the estimate.
- if (IsBitrateImproving(probe_bitrate_bps) &&
- probe_bitrate_bps > static_cast<int>(incoming_bitrate_.Rate(now_ms))) {
+ if (IsBitrateImproving(probe_bitrate_bps)) {
LOG(LS_INFO) << "Probe successful, sent at "
<< best_it->GetSendBitrateBps() << " bps, received at "
<< best_it->GetRecvBitrateBps()
@@ -215,14 +210,14 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketFeedbackVector(
for (const auto& packet_info : packet_feedback_vector) {
IncomingPacketInfo(packet_info.arrival_time_ms,
ConvertMsTo24Bits(packet_info.send_time_ms),
- packet_info.payload_size, 0, packet_info.was_paced);
+ packet_info.payload_size, 0);
}
}
-void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(int64_t arrival_time_ms,
- size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(
+ int64_t arrival_time_ms,
+ size_t payload_size,
+ const RTPHeader& header) {
RTC_DCHECK(network_thread_.CalledOnValidThread());
if (!header.extension.hasAbsoluteSendTime) {
LOG(LS_WARNING) << "RemoteBitrateEstimatorAbsSendTimeImpl: Incoming packet "
@@ -230,36 +225,31 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(int64_t arrival_time_ms,
return;
}
IncomingPacketInfo(arrival_time_ms, header.extension.absoluteSendTime,
- payload_size, header.ssrc, was_paced);
+ payload_size, header.ssrc);
}
void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
int64_t arrival_time_ms,
uint32_t send_time_24bits,
size_t payload_size,
- uint32_t ssrc,
- bool was_paced) {
+ uint32_t ssrc) {
assert(send_time_24bits < (1ul << 24));
// Shift up send time to use the full 32 bits that inter_arrival works with,
// so wrapping works properly.
uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift;
int64_t send_time_ms = static_cast<int64_t>(timestamp) * kTimestampToMs;
- int64_t now_ms = clock_->TimeInMilliseconds();
+ int64_t now_ms = arrival_time_ms;
// TODO(holmer): SSRCs are only needed for REMB, should be broken out from
// here.
incoming_bitrate_.Update(payload_size, now_ms);
if (first_packet_time_ms_ == -1)
- first_packet_time_ms_ = clock_->TimeInMilliseconds();
+ first_packet_time_ms_ = arrival_time_ms;
uint32_t ts_delta = 0;
int64_t t_delta = 0;
int size_delta = 0;
- // For now only try to detect probes while we don't have a valid estimate, and
- // make sure the packet was paced. We currently assume that only packets
- // larger than 200 bytes are paced by the sender.
- was_paced = was_paced && payload_size > PacedSender::kMinProbePacketSize;
bool update_estimate = false;
uint32_t target_bitrate_bps = 0;
std::vector<uint32_t> ssrcs;
@@ -267,9 +257,14 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
rtc::CritScope lock(&crit_);
TimeoutStreams(now_ms);
+ RTC_DCHECK(inter_arrival_.get());
+ RTC_DCHECK(estimator_.get());
ssrcs_[ssrc] = now_ms;
- if (was_paced &&
+ // For now only try to detect probes while we don't have a valid estimate.
+ // We currently assume that only packets larger than 200 bytes are paced by
+ // the sender.
+ if (payload_size > PacedSender::kMinProbePacketSize &&
(!remote_rate_.ValidEstimate() ||
now_ms - first_packet_time_ms_ < kInitialProbingIntervalMs)) {
// TODO(holmer): Use a map instead to get correct order?
@@ -295,9 +290,9 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
if (inter_arrival_->ComputeDeltas(timestamp, arrival_time_ms, payload_size,
&ts_delta, &t_delta, &size_delta)) {
double ts_delta_ms = (1000.0 * ts_delta) / (1 << kInterArrivalShift);
- estimator_.Update(t_delta, ts_delta_ms, size_delta, detector_.State());
- detector_.Detect(estimator_.offset(), ts_delta_ms,
- estimator_.num_of_deltas(), arrival_time_ms);
+ estimator_->Update(t_delta, ts_delta_ms, size_delta, detector_.State());
+ detector_.Detect(estimator_->offset(), ts_delta_ms,
+ estimator_->num_of_deltas(), arrival_time_ms);
}
if (!update_estimate) {
@@ -319,7 +314,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
// and the target bitrate is too high compared to what we are receiving.
const RateControlInput input(detector_.State(),
incoming_bitrate_.Rate(now_ms),
- estimator_.var_noise());
+ estimator_->var_noise());
remote_rate_.Update(&input, now_ms);
target_bitrate_bps = remote_rate_.UpdateBandwidthEstimate(now_ms);
update_estimate = remote_rate_.ValidEstimate();
@@ -352,6 +347,7 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
inter_arrival_.reset(
new InterArrival((kTimestampGroupLengthMs << kInterArrivalShift) / 1000,
kTimestampToMs, true));
+ estimator_.reset(new OveruseEstimator(OverUseDetectorOptions()));
// We deliberately don't reset the first_packet_time_ms_ here for now since
// we only probe for bandwidth in the beginning of a call right now.
}
@@ -359,7 +355,6 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
void RemoteBitrateEstimatorAbsSendTime::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- RTC_DCHECK(process_thread_.CalledOnValidThread());
rtc::CritScope lock(&crit_);
remote_rate_.SetRtt(avg_rtt_ms);
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
index 1f47dc3b244..9403a6cc8b9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
@@ -17,6 +17,7 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/rate_statistics.h"
#include "webrtc/base/thread_checker.h"
@@ -67,8 +68,7 @@ struct Cluster {
class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
public:
- RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer,
- Clock* clock);
+ explicit RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer);
virtual ~RemoteBitrateEstimatorAbsSendTime() {}
void IncomingPacketFeedbackVector(
@@ -76,8 +76,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
// This class relies on Process() being called periodically (at least once
// every other second) for streams to be timed out properly. Therefore it
// shouldn't be detached from the ProcessThread except if it's about to be
@@ -102,8 +101,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
void IncomingPacketInfo(int64_t arrival_time_ms,
uint32_t send_time_24bits,
size_t payload_size,
- uint32_t ssrc,
- bool was_paced);
+ uint32_t ssrc);
void ComputeClusters(std::list<Cluster>* clusters) const;
@@ -121,7 +119,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
rtc::ThreadChecker network_thread_;
RemoteBitrateObserver* const observer_;
std::unique_ptr<InterArrival> inter_arrival_;
- OveruseEstimator estimator_;
+ std::unique_ptr<OveruseEstimator> estimator_;
OveruseDetector detector_;
RateStatistics incoming_bitrate_;
std::vector<int> recent_propagation_delta_ms_;
@@ -131,11 +129,9 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
int64_t first_packet_time_ms_;
int64_t last_update_ms_;
- rtc::ThreadChecker process_thread_;
rtc::CriticalSection crit_;
Ssrcs ssrcs_ GUARDED_BY(&crit_);
AimdRateControl remote_rate_ GUARDED_BY(&crit_);
- Clock* const clock_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorAbsSendTime);
};
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
index e8026a5764d..a4e4150e769 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
@@ -20,7 +20,7 @@ class RemoteBitrateEstimatorAbsSendTimeTest :
RemoteBitrateEstimatorAbsSendTimeTest() {}
virtual void SetUp() {
bitrate_estimator_.reset(new RemoteBitrateEstimatorAbsSendTime(
- bitrate_observer_.get(), &clock_));
+ bitrate_observer_.get()));
}
protected:
RTC_DISALLOW_COPY_AND_ASSIGN(RemoteBitrateEstimatorAbsSendTimeTest);
@@ -35,15 +35,15 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, RateIncreaseReordering) {
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, RateIncreaseRtpTimestamps) {
- RateIncreaseRtpTimestampsTestHelper(1232);
+ RateIncreaseRtpTimestampsTestHelper(1229);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropOneStream) {
- CapacityDropTestHelper(1, false, 633);
+ CapacityDropTestHelper(1, false, 667);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropOneStreamWrap) {
- CapacityDropTestHelper(1, true, 633);
+ CapacityDropTestHelper(1, true, 667);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropTwoStreamsWrap) {
@@ -90,7 +90,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProcessAfterTimeout) {
// RemoteBitrateEstimator.
const int64_t kStreamTimeOutMs = 2000;
const int64_t kProcessIntervalMs = 1000;
- IncomingPacket(0, 1000, clock_.TimeInMilliseconds(), 0, 0, true);
+ IncomingPacket(0, 1000, clock_.TimeInMilliseconds(), 0, 0);
clock_.AdvanceTimeMilliseconds(kStreamTimeOutMs + 1);
// Trigger timeout.
bitrate_estimator_->Process();
@@ -106,16 +106,14 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetection) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
// Second burst sent at 8 * 1000 / 5 = 1600 kbps.
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(5);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -132,12 +130,10 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(5);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
// Non-paced packet, arriving 5 ms after.
clock_.AdvanceTimeMilliseconds(5);
- IncomingPacket(0, 100, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- false);
+ IncomingPacket(0, 100, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -158,7 +154,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
now_ms = clock_.TimeInMilliseconds();
send_time_ms += 10;
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
// Second burst sent at 8 * 1000 / 5 = 1600 kbps, arriving at 8 * 1000 / 8 =
@@ -168,7 +164,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
now_ms = clock_.TimeInMilliseconds();
send_time_ms += 5;
IncomingPacket(0, 1000, now_ms, send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -188,7 +184,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
send_time_ms += 10;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -207,7 +203,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetectionFasterArrival) {
send_time_ms += 10;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -225,7 +221,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetectionSlowerArrival) {
send_time_ms += 5;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -245,7 +241,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
send_time_ms += 1;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -261,8 +257,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, ProbingIgnoresSmallPackets) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 200, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 200, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -273,8 +268,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, ProbingIgnoresSmallPackets) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
// Wait long enough so that we can call Process again.
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index f38ef783067..b5adb9fa70a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -66,10 +66,10 @@ RemoteBitrateEstimatorSingleStream::~RemoteBitrateEstimatorSingleStream() {
}
}
-void RemoteBitrateEstimatorSingleStream::IncomingPacket(int64_t arrival_time_ms,
- size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+void RemoteBitrateEstimatorSingleStream::IncomingPacket(
+ int64_t arrival_time_ms,
+ size_t payload_size,
+ const RTPHeader& header) {
uint32_t ssrc = header.ssrc;
uint32_t rtp_timestamp = header.timestamp +
header.extension.transmissionTimeOffset;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
index 5516ea781da..2f74e1c4af0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
@@ -15,6 +15,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/rate_statistics.h"
#include "webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
@@ -30,8 +31,7 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
void Process() override;
int64_t TimeUntilNextProcess() override;
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
index 6fd0ad11b57..97e3abaa32b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
@@ -47,7 +47,7 @@ TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropOneStreamWrap) {
}
TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropTwoStreamsWrap) {
- CapacityDropTestHelper(2, true, 767);
+ CapacityDropTestHelper(2, true, 600);
}
TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThreeStreamsWrap) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
index 8bfb8ed0fd3..4530053a869 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
@@ -221,8 +221,7 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
size_t payload_size,
int64_t arrival_time,
uint32_t rtp_timestamp,
- uint32_t absolute_send_time,
- bool was_paced) {
+ uint32_t absolute_send_time) {
RTPHeader header;
memset(&header, 0, sizeof(header));
header.ssrc = ssrc;
@@ -230,7 +229,7 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
header.extension.hasAbsoluteSendTime = true;
header.extension.absoluteSendTime = absolute_send_time;
bitrate_estimator_->IncomingPacket(arrival_time + kArrivalTimeClockOffsetMs,
- payload_size, header, was_paced);
+ payload_size, header);
}
// Generates a frame of packets belonging to a stream at a given bitrate and
@@ -255,7 +254,7 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(uint32_t ssrc,
clock_.TimeInMicroseconds());
IncomingPacket(packet->ssrc, packet->size,
(packet->arrival_time + 500) / 1000, packet->rtp_timestamp,
- AbsSendTime(packet->send_time, 1000000), true);
+ AbsSendTime(packet->send_time, 1000000));
if (bitrate_observer_->updated()) {
if (bitrate_observer_->latest_bitrate() < bitrate_bps)
overuse = true;
@@ -319,7 +318,7 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
clock_.AdvanceTimeMilliseconds(1000);
// Inserting a packet. Still no valid estimate. We need to wait 5 seconds.
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
EXPECT_FALSE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_bps));
EXPECT_EQ(0u, ssrcs.size());
@@ -328,7 +327,7 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
// Inserting packets for 5 seconds to get a valid estimate.
for (int i = 0; i < 5 * kFramerate + 1; ++i) {
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(1000 / kFramerate);
timestamp += 90 * kFrameIntervalMs;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -356,13 +355,13 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
uint32_t timestamp = 0;
uint32_t absolute_send_time = 0;
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
EXPECT_FALSE(bitrate_observer_->updated()); // No valid estimate.
// Inserting packets for one second to get a valid estimate.
for (int i = 0; i < 5 * kFramerate + 1; ++i) {
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
timestamp += 90 * kFrameIntervalMs;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -379,12 +378,12 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
absolute_send_time = AddAbsSendTime(absolute_send_time,
2 * kFrameIntervalAbsSendTime);
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
- IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
- timestamp - 90 * kFrameIntervalMs,
- AddAbsSendTime(absolute_send_time,
- -static_cast<int>(kFrameIntervalAbsSendTime)),
- true);
+ absolute_send_time);
+ IncomingPacket(
+ kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
+ timestamp - 90 * kFrameIntervalMs,
+ AddAbsSendTime(absolute_send_time,
+ -static_cast<int>(kFrameIntervalAbsSendTime)));
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
@@ -517,7 +516,7 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
// time for the first estimate to be generated and for Process() to be called.
for (int i = 0; i <= 6 * kFramerate; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
timestamp += 90 * kFrameIntervalMs;
@@ -538,7 +537,7 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
// Insert |kTimestampGroupLength| frames with just 1 timestamp ticks in
// between. Should be treated as part of the same group by the estimator.
IncomingPacket(kDefaultSsrc, 100, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs / kTimestampGroupLength);
timestamp += 1;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -568,7 +567,7 @@ void RemoteBitrateEstimatorTest::TestWrappingHelper(
for (size_t i = 0; i < 3000; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
timestamp += kFrameIntervalMs;
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -583,9 +582,9 @@ void RemoteBitrateEstimatorTest::TestWrappingHelper(
absolute_send_time = AddAbsSendTime(absolute_send_time,
AbsSendTime(silence_time_s, 1));
bitrate_estimator_->Process();
- for (size_t i = 0; i < 10; ++i) {
+ for (size_t i = 0; i < 21; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
timestamp += kFrameIntervalMs;
clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs);
absolute_send_time = AddAbsSendTime(absolute_send_time,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index 12ac9e8502d..b4bff670e75 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -173,8 +173,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
size_t payload_size,
int64_t arrival_time,
uint32_t rtp_timestamp,
- uint32_t absolute_send_time,
- bool was_paced);
+ uint32_t absolute_send_time);
// Generates a frame of packets belonging to a stream at a given bitrate and
// with a given ssrc. The stream is pushed through a very simple simulated
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
index 31cd9f98028..e307242f8d3 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
@@ -16,6 +16,7 @@
#include <algorithm>
#include <sstream>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/random.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
index eace9fc8b0a..2172bce9380 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
@@ -43,8 +43,7 @@ void RemoteEstimatorProxy::IncomingPacketFeedbackVector(
void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+ const RTPHeader& header) {
if (!header.extension.hasTransportSequenceNumber) {
LOG(LS_WARNING) << "RemoteEstimatorProxy: Incoming packet "
"is missing the transport sequence number extension!";
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
index 93d5244b67b..66373e29778 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
@@ -39,8 +39,7 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
const std::vector<PacketInfo>& packet_feedback_vector) override;
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
void RemoveStream(uint32_t ssrc) override {}
bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index 3c3c7297e18..a1264b2ff95 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -37,7 +37,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test {
header.extension.hasTransportSequenceNumber = true;
header.extension.transportSequenceNumber = seq;
header.ssrc = kMediaSsrc;
- proxy_.IncomingPacket(time_ms, kDefaultPacketSize, header, true);
+ proxy_.IncomingPacket(time_ms, kDefaultPacketSize, header);
}
void Process() {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
index a58d12a1600..f4fe2208a81 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
@@ -26,9 +26,7 @@ void SendTimeHistory::Clear() {
history_.clear();
}
-void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number,
- size_t length,
- bool was_paced) {
+void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number, size_t length) {
EraseOld();
if (history_.empty())
@@ -36,7 +34,7 @@ void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number,
history_.insert(std::pair<uint16_t, PacketInfo>(
sequence_number, PacketInfo(clock_->TimeInMilliseconds(), 0, -1,
- sequence_number, length, was_paced)));
+ sequence_number, length)));
}
bool SendTimeHistory::OnSentPacket(uint16_t sequence_number,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
index b525813cdca..7500f575fb7 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
@@ -33,9 +33,8 @@ class SendTimeHistoryTest : public ::testing::Test {
void AddPacketWithSendTime(uint16_t sequence_number,
size_t length,
- bool was_paced,
int64_t send_time_ms) {
- history_.AddAndRemoveOld(sequence_number, length, was_paced);
+ history_.AddAndRemoveOld(sequence_number, length);
history_.OnSentPacket(sequence_number, send_time_ms);
}
@@ -46,42 +45,40 @@ class SendTimeHistoryTest : public ::testing::Test {
// Help class extended so we can do EXPECT_EQ and collections.
class PacketInfo : public webrtc::PacketInfo {
public:
- PacketInfo() : webrtc::PacketInfo(-1, 0, 0, 0, 0, false) {}
+ PacketInfo() : webrtc::PacketInfo(-1, 0, 0, 0, 0) {}
PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
- : PacketInfo(arrival_time_ms, 0, sequence_number, 0, false) {}
+ : PacketInfo(arrival_time_ms, 0, sequence_number, 0) {}
PacketInfo(int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: webrtc::PacketInfo(-1,
arrival_time_ms,
send_time_ms,
sequence_number,
- payload_size,
- was_paced) {}
+ payload_size) {}
bool operator==(const PacketInfo& other) const {
return arrival_time_ms == other.arrival_time_ms &&
send_time_ms == other.send_time_ms &&
sequence_number == other.sequence_number &&
- payload_size == other.payload_size && was_paced == other.was_paced;
+ payload_size == other.payload_size;
}
};
TEST_F(SendTimeHistoryTest, AddRemoveOne) {
const uint16_t kSeqNo = 10;
- const PacketInfo kSentPacket(0, 1, kSeqNo, 1, true);
- AddPacketWithSendTime(kSeqNo, 1, true, 1);
+ const PacketInfo kSentPacket(0, 1, kSeqNo, 1);
+ AddPacketWithSendTime(kSeqNo, 1, 1);
- PacketInfo received_packet(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet(0, 0, kSeqNo, 0);
EXPECT_TRUE(history_.GetInfo(&received_packet, false));
EXPECT_EQ(kSentPacket, received_packet);
- PacketInfo received_packet2(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet2(0, 0, kSeqNo, 0);
EXPECT_TRUE(history_.GetInfo(&received_packet2, true));
EXPECT_EQ(kSentPacket, received_packet2);
- PacketInfo received_packet3(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet3(0, 0, kSeqNo, 0);
EXPECT_FALSE(history_.GetInfo(&received_packet3, true));
}
@@ -90,9 +87,8 @@ TEST_F(SendTimeHistoryTest, PopulatesExpectedFields) {
const int64_t kSendTime = 1000;
const int64_t kReceiveTime = 2000;
const size_t kPayloadSize = 42;
- const bool kPaced = true;
- AddPacketWithSendTime(kSeqNo, kPayloadSize, kPaced, kSendTime);
+ AddPacketWithSendTime(kSeqNo, kPayloadSize, kSendTime);
PacketInfo info(kReceiveTime, kSeqNo);
EXPECT_TRUE(history_.GetInfo(&info, true));
@@ -100,7 +96,6 @@ TEST_F(SendTimeHistoryTest, PopulatesExpectedFields) {
EXPECT_EQ(kSendTime, info.send_time_ms);
EXPECT_EQ(kSeqNo, info.sequence_number);
EXPECT_EQ(kPayloadSize, info.payload_size);
- EXPECT_EQ(kPaced, info.was_paced);
}
TEST_F(SendTimeHistoryTest, AddThenRemoveOutOfOrder) {
@@ -109,19 +104,16 @@ TEST_F(SendTimeHistoryTest, AddThenRemoveOutOfOrder) {
const size_t num_items = 100;
const size_t kPacketSize = 400;
const size_t kTransmissionTime = 1234;
- const bool kPaced = true;
for (size_t i = 0; i < num_items; ++i) {
sent_packets.push_back(PacketInfo(0, static_cast<int64_t>(i),
- static_cast<uint16_t>(i), kPacketSize,
- kPaced));
+ static_cast<uint16_t>(i), kPacketSize));
received_packets.push_back(
PacketInfo(static_cast<int64_t>(i) + kTransmissionTime, 0,
- static_cast<uint16_t>(i), kPacketSize, false));
+ static_cast<uint16_t>(i), kPacketSize));
}
for (size_t i = 0; i < num_items; ++i) {
history_.AddAndRemoveOld(sent_packets[i].sequence_number,
- sent_packets[i].payload_size,
- sent_packets[i].was_paced);
+ sent_packets[i].payload_size);
}
for (size_t i = 0; i < num_items; ++i)
history_.OnSentPacket(sent_packets[i].sequence_number,
@@ -143,19 +135,19 @@ TEST_F(SendTimeHistoryTest, HistorySize) {
const int kItems = kDefaultHistoryLengthMs / 100;
for (int i = 0; i < kItems; ++i) {
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(i, 0, false, i * 100);
+ AddPacketWithSendTime(i, 0, i * 100);
}
for (int i = 0; i < kItems; ++i) {
- PacketInfo info(0, 0, static_cast<uint16_t>(i), 0, false);
+ PacketInfo info(0, 0, static_cast<uint16_t>(i), 0);
EXPECT_TRUE(history_.GetInfo(&info, false));
EXPECT_EQ(i * 100, info.send_time_ms);
}
clock_.AdvanceTimeMilliseconds(101);
- AddPacketWithSendTime(kItems, 0, false, kItems * 101);
- PacketInfo info(0, 0, 0, 0, false);
+ AddPacketWithSendTime(kItems, 0, kItems * 101);
+ PacketInfo info(0, 0, 0, 0);
EXPECT_FALSE(history_.GetInfo(&info, false));
for (int i = 1; i < (kItems + 1); ++i) {
- PacketInfo info2(0, 0, static_cast<uint16_t>(i), 0, false);
+ PacketInfo info2(0, 0, static_cast<uint16_t>(i), 0);
EXPECT_TRUE(history_.GetInfo(&info2, false));
int64_t expected_time_ms = (i == kItems) ? i * 101 : i * 100;
EXPECT_EQ(expected_time_ms, info2.send_time_ms);
@@ -164,16 +156,16 @@ TEST_F(SendTimeHistoryTest, HistorySize) {
TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
const uint16_t kMaxSeqNo = std::numeric_limits<uint16_t>::max();
- AddPacketWithSendTime(kMaxSeqNo - 2, 0, false, 0);
+ AddPacketWithSendTime(kMaxSeqNo - 2, 0, 0);
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(kMaxSeqNo - 1, 1, false, 100);
+ AddPacketWithSendTime(kMaxSeqNo - 1, 1, 100);
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(kMaxSeqNo, 0, false, 200);
+ AddPacketWithSendTime(kMaxSeqNo, 0, 200);
clock_.AdvanceTimeMilliseconds(kDefaultHistoryLengthMs - 200 + 1);
- AddPacketWithSendTime(0, 0, false, kDefaultHistoryLengthMs);
+ AddPacketWithSendTime(0, 0, kDefaultHistoryLengthMs);
PacketInfo info(0, static_cast<uint16_t>(kMaxSeqNo - 2));
EXPECT_FALSE(history_.GetInfo(&info, false));
@@ -189,7 +181,7 @@ TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
EXPECT_TRUE(history_.GetInfo(&info5, true));
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(1, 0, false, 1100);
+ AddPacketWithSendTime(1, 0, 1100);
PacketInfo info6(0, static_cast<uint16_t>(kMaxSeqNo - 2));
EXPECT_FALSE(history_.GetInfo(&info6, false));
@@ -206,26 +198,26 @@ TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
TEST_F(SendTimeHistoryTest, InterlievedGetAndRemove) {
const uint16_t kSeqNo = 1;
const int64_t kTimestamp = 2;
- PacketInfo packets[3] = {{0, kTimestamp, kSeqNo, 0, false},
- {0, kTimestamp + 1, kSeqNo + 1, 0, false},
- {0, kTimestamp + 2, kSeqNo + 2, 0, false}};
+ PacketInfo packets[3] = {{0, kTimestamp, kSeqNo, 0},
+ {0, kTimestamp + 1, kSeqNo + 1, 0},
+ {0, kTimestamp + 2, kSeqNo + 2, 0}};
AddPacketWithSendTime(packets[0].sequence_number, packets[0].payload_size,
- packets[0].was_paced, packets[0].send_time_ms);
+ packets[0].send_time_ms);
AddPacketWithSendTime(packets[1].sequence_number, packets[1].payload_size,
- packets[1].was_paced, packets[1].send_time_ms);
- PacketInfo info(0, 0, packets[0].sequence_number, 0, false);
+ packets[1].send_time_ms);
+ PacketInfo info(0, 0, packets[0].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info, true));
EXPECT_EQ(packets[0], info);
AddPacketWithSendTime(packets[2].sequence_number, packets[2].payload_size,
- packets[2].was_paced, packets[2].send_time_ms);
+ packets[2].send_time_ms);
- PacketInfo info2(0, 0, packets[1].sequence_number, 0, false);
+ PacketInfo info2(0, 0, packets[1].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info2, true));
EXPECT_EQ(packets[1], info2);
- PacketInfo info3(0, 0, packets[2].sequence_number, 0, false);
+ PacketInfo info3(0, 0, packets[2].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info3, true));
EXPECT_EQ(packets[2], info3);
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
index 5e282c6f087..b21a269ec6b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
@@ -13,6 +13,7 @@
#include <limits>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
index bd016cf318b..fc3018f9967 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
@@ -16,6 +16,7 @@
#include <sstream>
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/gtest_prod_util.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
index 41bf836c9e9..a9fd617118a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
@@ -14,6 +14,8 @@
#include <sstream>
+#include "webrtc/base/constructormagic.h"
+
namespace webrtc {
namespace testing {
namespace bwe {
@@ -97,18 +99,14 @@ Packet::Packet()
creation_time_us_(-1),
send_time_us_(-1),
sender_timestamp_us_(-1),
- payload_size_(0),
- paced_(false) {
-}
+ payload_size_(0) {}
Packet::Packet(int flow_id, int64_t send_time_us, size_t payload_size)
: flow_id_(flow_id),
creation_time_us_(send_time_us),
send_time_us_(send_time_us),
sender_timestamp_us_(send_time_us),
- payload_size_(payload_size),
- paced_(false) {
-}
+ payload_size_(payload_size) {}
Packet::~Packet() {
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
index 223b20f21c5..1fe3a228e4b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
@@ -25,6 +25,7 @@
#include <vector>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/random.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/include/module_common_types.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
index 37009c77018..2a33440bc3b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
@@ -21,6 +21,7 @@
#include <map>
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/voice_engine/channel.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
index d469e675e41..e2d3da9632a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
@@ -69,7 +69,7 @@ RembReceiver::RembReceiver(int flow_id, bool plot)
recv_stats_(ReceiveStatistics::Create(&clock_)),
latest_estimate_bps_(-1),
last_feedback_ms_(-1),
- estimator_(new RemoteBitrateEstimatorAbsSendTime(this, &clock_)) {
+ estimator_(new RemoteBitrateEstimatorAbsSendTime(this)) {
std::stringstream ss;
ss << "Estimate_" << flow_id_ << "#1";
estimate_log_prefix_ = ss.str();
@@ -95,7 +95,7 @@ void RembReceiver::ReceivePacket(int64_t arrival_time_ms,
step_ms = std::max<int64_t>(estimator_->TimeUntilNextProcess(), 0);
}
estimator_->IncomingPacket(arrival_time_ms, media_packet.payload_size(),
- media_packet.header(), true);
+ media_packet.header());
clock_.AdvanceTimeMilliseconds(arrival_time_ms - clock_.TimeInMilliseconds());
ASSERT_TRUE(arrival_time_ms == clock_.TimeInMilliseconds());
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
index 5e6b6edb256..3dc4f388c8f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
index 36dff1fb2ac..c54a7b05949 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
@@ -23,7 +23,7 @@ const int kFeedbackIntervalMs = 50;
FullBweSender::FullBweSender(int kbps, BitrateObserver* observer, Clock* clock)
: bitrate_controller_(
BitrateController::CreateBitrateController(clock, observer)),
- rbe_(new RemoteBitrateEstimatorAbsSendTime(this, clock)),
+ rbe_(new RemoteBitrateEstimatorAbsSendTime(this)),
feedback_observer_(bitrate_controller_->CreateRtcpBandwidthObserver()),
clock_(clock),
send_time_history_(clock_, 10000),
@@ -93,8 +93,7 @@ void FullBweSender::OnPacketsSent(const Packets& packets) {
if (packet->GetPacketType() == Packet::kMedia) {
MediaPacket* media_packet = static_cast<MediaPacket*>(packet);
send_time_history_.AddAndRemoveOld(media_packet->header().sequenceNumber,
- media_packet->payload_size(),
- packet->paced());
+ media_packet->payload_size());
send_time_history_.OnSentPacket(media_packet->header().sequenceNumber,
media_packet->sender_timestamp_ms());
}
@@ -126,7 +125,7 @@ void SendSideBweReceiver::ReceivePacket(int64_t arrival_time_ms,
const MediaPacket& media_packet) {
packet_feedback_vector_.push_back(PacketInfo(
-1, arrival_time_ms, media_packet.sender_timestamp_ms(),
- media_packet.header().sequenceNumber, media_packet.payload_size(), true));
+ media_packet.header().sequenceNumber, media_packet.payload_size()));
// Log received packet information.
BweReceiver::ReceivePacket(arrival_time_ms, media_packet);
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
index 4a361c4dc20..9aa596c1bd7 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
@@ -42,8 +42,6 @@ class Packet {
virtual void set_sender_timestamp_us(int64_t sender_timestamp_us) {
sender_timestamp_us_ = sender_timestamp_us;
}
- virtual void set_paced(bool paced) { paced_ = paced; }
- virtual bool paced() const { return paced_; }
virtual int64_t creation_time_ms() const {
return (creation_time_us_ + 500) / 1000;
}
@@ -58,7 +56,6 @@ class Packet {
int64_t send_time_us_; // Time the packet left last processor touching it.
int64_t sender_timestamp_us_; // Time the packet left the Sender.
size_t payload_size_; // Size of the (non-existent, simulated) payload.
- bool paced_; // True if sent through paced sender.
};
class MediaPacket : public Packet {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
index 3bcbc0a071b..7ffeb5584b0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
@@ -157,13 +157,9 @@ PacedVideoSender::PacedVideoSender(PacketProcessorListener* listener,
VideoSource* source,
BandwidthEstimatorType estimator)
: VideoSender(listener, source, estimator),
- pacer_(&clock_,
- this,
- source->bits_per_second() / 1000,
- PacedSender::kDefaultPaceMultiplier * source->bits_per_second() /
- 1000,
- 0) {
+ pacer_(&clock_, this) {
modules_.push_back(&pacer_);
+ pacer_.SetEstimatedBitrate(source->bits_per_second());
}
PacedVideoSender::~PacedVideoSender() {
@@ -271,8 +267,6 @@ void PacedVideoSender::QueuePackets(Packets* batch,
}
Packets to_transfer;
to_transfer.splice(to_transfer.begin(), queue_, queue_.begin(), it);
- for (Packet* packet : to_transfer)
- packet->set_paced(true);
bwe_->OnPacketsSent(to_transfer);
batch->merge(to_transfer, DereferencingComparator<Packet>);
}
@@ -280,7 +274,8 @@ void PacedVideoSender::QueuePackets(Packets* batch,
bool PacedVideoSender::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) {
for (Packets::iterator it = pacer_queue_.begin(); it != pacer_queue_.end();
++it) {
MediaPacket* media_packet = static_cast<MediaPacket*>(*it);
@@ -310,9 +305,7 @@ void PacedVideoSender::OnNetworkChanged(uint32_t target_bitrate_bps,
uint8_t fraction_lost,
int64_t rtt) {
VideoSender::OnNetworkChanged(target_bitrate_bps, fraction_lost, rtt);
- pacer_.UpdateBitrate(
- target_bitrate_bps / 1000,
- PacedSender::kDefaultPaceMultiplier * target_bitrate_bps / 1000, 0);
+ pacer_.SetEstimatedBitrate(target_bitrate_bps);
}
const int kNoLimit = std::numeric_limits<int>::max();
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
index 5ed4a3bc380..4990574bdec 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
@@ -100,7 +100,7 @@ class VideoSender : public PacketSender, public BitrateObserver {
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSender);
};
-class PacedVideoSender : public VideoSender, public PacedSender::Callback {
+class PacedVideoSender : public VideoSender, public PacedSender::PacketSender {
public:
PacedVideoSender(PacketProcessorListener* listener,
VideoSource* source,
@@ -113,7 +113,8 @@ class PacedVideoSender : public VideoSender, public PacedSender::Callback {
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) override;
+ bool retransmission,
+ int probe_cluster_id) override;
size_t TimeToSendPadding(size_t bytes) override;
// Implements BitrateObserver.
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
index f138035de53..7ae6ede363e 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -117,7 +117,7 @@ bool ParseArgsAndSetupEstimator(int argc,
switch (extension) {
case webrtc::kRtpExtensionAbsoluteSendTime: {
*estimator =
- new webrtc::RemoteBitrateEstimatorAbsSendTime(observer, clock);
+ new webrtc::RemoteBitrateEstimatorAbsSendTime(observer);
*estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator";
break;
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
index 08dc0e63a02..f5dbaef3717 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
@@ -83,7 +83,7 @@ int main(int argc, char** argv) {
packet_length = packet.original_length;
}
rbe->IncomingPacket(clock.TimeInMilliseconds(),
- packet_length - header.headerLength, header, true);
+ packet_length - header.headerLength, header);
++packet_counter;
}
if (!rtp_reader->NextPacket(&packet)) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
index f7e07a5dc51..a02f407d072 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
+#include <algorithm>
#include <limits>
#include "webrtc/base/checks.h"
@@ -26,6 +27,17 @@ const int64_t kBaseTimestampScaleFactor =
rtcp::TransportFeedback::kDeltaScaleFactor * (1 << 8);
const int64_t kBaseTimestampRangeSizeUs = kBaseTimestampScaleFactor * (1 << 24);
+class PacketInfoComparator {
+ public:
+ inline bool operator()(const PacketInfo& lhs, const PacketInfo& rhs) {
+ if (lhs.arrival_time_ms != rhs.arrival_time_ms)
+ return lhs.arrival_time_ms < rhs.arrival_time_ms;
+ if (lhs.send_time_ms != rhs.send_time_ms)
+ return lhs.send_time_ms < rhs.send_time_ms;
+ return lhs.sequence_number < rhs.sequence_number;
+ }
+};
+
TransportFeedbackAdapter::TransportFeedbackAdapter(
BitrateController* bitrate_controller,
Clock* clock)
@@ -46,10 +58,9 @@ void TransportFeedbackAdapter::SetBitrateEstimator(
}
void TransportFeedbackAdapter::AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) {
+ size_t length) {
rtc::CritScope cs(&lock_);
- send_time_history_.AddAndRemoveOld(sequence_number, length, was_paced);
+ send_time_history_.AddAndRemoveOld(sequence_number, length);
}
void TransportFeedbackAdapter::OnSentPacket(uint16_t sequence_number,
@@ -104,6 +115,8 @@ void TransportFeedbackAdapter::OnTransportFeedback(
}
++sequence_number;
}
+ std::sort(packet_feedback_vector.begin(), packet_feedback_vector.end(),
+ PacketInfoComparator());
RTC_DCHECK(delta_it == delta_vec.end());
if (failed_lookups > 0) {
LOG(LS_WARNING) << "Failed to lookup send time for " << failed_lookups
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
index c97ef57cf05..867570f26b9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
@@ -38,9 +38,7 @@ class TransportFeedbackAdapter : public TransportFeedbackObserver,
}
// Implements TransportFeedbackObserver.
- void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) override;
+ void AddPacket(uint16_t sequence_number, size_t length) override;
void OnSentPacket(uint16_t sequence_number, int64_t send_time_ms);
void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
index f3be09206ec..239f2ec561d 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
@@ -92,15 +92,13 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
EXPECT_EQ(truth[i].send_time_ms, input[i].send_time_ms);
EXPECT_EQ(truth[i].sequence_number, input[i].sequence_number);
EXPECT_EQ(truth[i].payload_size, input[i].payload_size);
- EXPECT_EQ(truth[i].was_paced, input[i].was_paced);
}
}
// Utility method, to reset arrival_time_ms before adding send time.
void OnSentPacket(PacketInfo info) {
info.arrival_time_ms = 0;
- adapter_->AddPacket(info.sequence_number, info.payload_size,
- info.was_paced);
+ adapter_->AddPacket(info.sequence_number, info.payload_size);
adapter_->OnSentPacket(info.sequence_number, info.send_time_ms);
}
@@ -114,11 +112,11 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) {
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(100, 200, 0, 1500, true));
- packets.push_back(PacketInfo(110, 210, 1, 1500, true));
- packets.push_back(PacketInfo(120, 220, 2, 1500, true));
- packets.push_back(PacketInfo(130, 230, 3, 1500, true));
- packets.push_back(PacketInfo(140, 240, 4, 1500, true));
+ packets.push_back(PacketInfo(100, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(120, 220, 2, 1500));
+ packets.push_back(PacketInfo(130, 230, 3, 1500));
+ packets.push_back(PacketInfo(140, 240, 4, 1500));
for (const PacketInfo& packet : packets)
OnSentPacket(packet);
@@ -145,11 +143,11 @@ TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) {
TEST_F(TransportFeedbackAdapterTest, HandlesDroppedPackets) {
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(100, 200, 0, 1500, true));
- packets.push_back(PacketInfo(110, 210, 1, 1500, true));
- packets.push_back(PacketInfo(120, 220, 2, 1500, true));
- packets.push_back(PacketInfo(130, 230, 3, 1500, true));
- packets.push_back(PacketInfo(140, 240, 4, 1500, true));
+ packets.push_back(PacketInfo(100, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(120, 220, 2, 1500));
+ packets.push_back(PacketInfo(130, 230, 3, 1500));
+ packets.push_back(PacketInfo(140, 240, 4, 1500));
const uint16_t kSendSideDropBefore = 1;
const uint16_t kReceiveSideDropAfter = 3;
@@ -190,9 +188,9 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
static_cast<int64_t>(1 << 8) *
static_cast<int64_t>((1 << 23) - 1) / 1000;
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(kHighArrivalTimeMs - 64, 200, 0, 1500, true));
- packets.push_back(PacketInfo(kHighArrivalTimeMs + 64, 210, 1, 1500, true));
- packets.push_back(PacketInfo(kHighArrivalTimeMs, 220, 2, 1500, true));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs - 64, 200, 0, 1500));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs + 64, 210, 1, 1500));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs, 220, 2, 1500));
for (const PacketInfo& packet : packets)
OnSentPacket(packet);
@@ -207,8 +205,8 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
packets[i].sequence_number, packets[i].arrival_time_ms * 1000));
rtc::Buffer raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(rtcp::TransportFeedback::ParseFrom(
- raw_packet.data(), raw_packet.size()));
+ feedback = rtcp::TransportFeedback::ParseFrom(raw_packet.data(),
+ raw_packet.size());
std::vector<PacketInfo> expected_packets;
expected_packets.push_back(packets[i]);
@@ -223,6 +221,39 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
}
}
+TEST_F(TransportFeedbackAdapterTest, HandlesReordering) {
+ std::vector<PacketInfo> packets;
+ packets.push_back(PacketInfo(120, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(100, 220, 2, 1500));
+ std::vector<PacketInfo> expected_packets;
+ expected_packets.push_back(packets[2]);
+ expected_packets.push_back(packets[1]);
+ expected_packets.push_back(packets[0]);
+
+ for (const PacketInfo& packet : packets)
+ OnSentPacket(packet);
+
+ rtcp::TransportFeedback feedback;
+ feedback.WithBase(packets[0].sequence_number,
+ packets[0].arrival_time_ms * 1000);
+
+ for (const PacketInfo& packet : packets) {
+ EXPECT_TRUE(feedback.WithReceivedPacket(packet.sequence_number,
+ packet.arrival_time_ms * 1000));
+ }
+
+ feedback.Build();
+
+ EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
+ .Times(1)
+ .WillOnce(Invoke([expected_packets,
+ this](const std::vector<PacketInfo>& feedback_vector) {
+ ComparePacketVectors(expected_packets, feedback_vector);
+ }));
+ adapter_->OnTransportFeedback(feedback);
+}
+
TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
std::vector<PacketInfo> sent_packets;
const int64_t kSmallDeltaUs =
@@ -257,6 +288,14 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
info.arrival_time_ms += (kLargePositiveDeltaUs + 1000) / 1000;
++info.sequence_number;
+ // Expected to be ordered on arrival time when the feedback message has been
+ // parsed.
+ std::vector<PacketInfo> expected_packets;
+ expected_packets.push_back(sent_packets[0]);
+ expected_packets.push_back(sent_packets[3]);
+ expected_packets.push_back(sent_packets[1]);
+ expected_packets.push_back(sent_packets[2]);
+
// Packets will be added to send history.
for (const PacketInfo& packet : sent_packets)
OnSentPacket(packet);
@@ -276,17 +315,17 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
info.arrival_time_ms * 1000));
rtc::Buffer raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(
- rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+ feedback =
+ rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
std::vector<PacketInfo> received_feedback;
EXPECT_TRUE(feedback.get() != nullptr);
EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
.Times(1)
- .WillOnce(Invoke([sent_packets, &received_feedback](
+ .WillOnce(Invoke([expected_packets, &received_feedback](
const std::vector<PacketInfo>& feedback_vector) {
- EXPECT_EQ(sent_packets.size(), feedback_vector.size());
+ EXPECT_EQ(expected_packets.size(), feedback_vector.size());
received_feedback = feedback_vector;
}));
adapter_->OnTransportFeedback(*feedback.get());
@@ -297,8 +336,8 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
EXPECT_TRUE(feedback->WithReceivedPacket(info.sequence_number,
info.arrival_time_ms * 1000));
raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(
- rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+ feedback =
+ rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
EXPECT_TRUE(feedback.get() != nullptr);
EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
@@ -310,9 +349,9 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
}));
adapter_->OnTransportFeedback(*feedback.get());
- sent_packets.push_back(info);
+ expected_packets.push_back(info);
- ComparePacketVectors(sent_packets, received_feedback);
+ ComparePacketVectors(expected_packets, received_feedback);
}
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
index d386951cb00..9d69811ef31 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
@@ -118,9 +118,15 @@ source_set("rtp_rtcp") {
"source/rtp_format_vp9.h",
"source/rtp_header_extension.cc",
"source/rtp_header_extension.h",
+ "source/rtp_header_extensions.cc",
+ "source/rtp_header_extensions.h",
"source/rtp_header_parser.cc",
+ "source/rtp_packet.cc",
+ "source/rtp_packet.h",
"source/rtp_packet_history.cc",
"source/rtp_packet_history.h",
+ "source/rtp_packet_received.h",
+ "source/rtp_packet_to_send.h",
"source/rtp_payload_registry.cc",
"source/rtp_receiver_audio.cc",
"source/rtp_receiver_audio.h",
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
index 56c6e48691c..207e749a02a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
@@ -11,7 +11,9 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
namespace webrtc {
@@ -40,7 +42,7 @@ class RemoteNtpTimeEstimator {
private:
Clock* clock_;
- rtc::scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
+ std::unique_ptr<TimestampExtrapolator> ts_extrapolator_;
RtcpList rtcp_list_;
int64_t last_timing_log_ms_;
RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
index fae864107f0..a199755aafe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
@@ -12,8 +12,9 @@
#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_PAYLOAD_REGISTRY_H_
#include <map>
+#include <memory>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -85,15 +86,6 @@ class RTPPayloadRegistry {
bool IsRtx(const RTPHeader& header) const;
- // DEPRECATED. Use RestoreOriginalPacket below that takes a uint8_t*
- // restored_packet, instead of a uint8_t**.
- // TODO(noahric): Remove this when all callers have been updated.
- bool RestoreOriginalPacket(uint8_t** restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const;
-
bool RestoreOriginalPacket(uint8_t* restored_packet,
const uint8_t* packet,
size_t* packet_length,
@@ -110,19 +102,10 @@ class RTPPayloadRegistry {
int GetPayloadTypeFrequency(uint8_t payload_type) const;
- // DEPRECATED. Use PayloadTypeToPayload below that returns const Payload*
- // instead of taking output parameter.
- // TODO(danilchap): Remove this when all callers have been updated.
- bool PayloadTypeToPayload(const uint8_t payload_type,
- RtpUtility::Payload*& payload) const { // NOLINT
- payload =
- const_cast<RtpUtility::Payload*>(PayloadTypeToPayload(payload_type));
- return payload != nullptr;
- }
const RtpUtility::Payload* PayloadTypeToPayload(uint8_t payload_type) const;
void ResetLastReceivedPayloadTypes() {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
last_received_payload_type_ = -1;
last_received_media_payload_type_ = -1;
}
@@ -136,34 +119,34 @@ class RTPPayloadRegistry {
bool ReportMediaPayloadType(uint8_t media_payload_type);
int8_t red_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return red_payload_type_;
}
int8_t ulpfec_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return ulpfec_payload_type_;
}
int8_t last_received_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return last_received_payload_type_;
}
void set_last_received_payload_type(int8_t last_received_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
last_received_payload_type_ = last_received_payload_type;
}
int8_t last_received_media_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return last_received_media_payload_type_;
}
bool use_rtx_payload_mapping_on_restore() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return use_rtx_payload_mapping_on_restore_;
}
void set_use_rtx_payload_mapping_on_restore(bool val) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
use_rtx_payload_mapping_on_restore_ = val;
}
@@ -178,9 +161,9 @@ class RTPPayloadRegistry {
bool IsRtxInternal(const RTPHeader& header) const;
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
RtpUtility::PayloadTypeMap payload_type_map_;
- rtc::scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
+ std::unique_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
int8_t red_payload_type_;
int8_t ulpfec_payload_type_;
int8_t incoming_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
index f393e41eabd..9db1c63da78 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
@@ -75,12 +75,6 @@ class RtpReceiver {
PayloadUnion payload_specific,
bool in_order) = 0;
- // Returns the currently configured NACK method.
- virtual NACKMethod NACK() const = 0;
-
- // Turn negative acknowledgement (NACK) requests on/off.
- virtual void SetNACKStatus(const NACKMethod method) = 0;
-
// Gets the last received timestamp. Returns true if a packet has been
// received, false otherwise.
virtual bool Timestamp(uint32_t* timestamp) const = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
index d01465b9f8f..66589888bda 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -16,6 +16,7 @@
#include <utility>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
@@ -77,7 +78,7 @@ class RtpRtcp : public Module {
FrameCountObserver* send_frame_count_observer;
SendSideDelayObserver* send_side_delay_observer;
RtcEventLog* event_log;
-
+ SendPacketObserver* send_packet_observer;
RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
};
@@ -210,10 +211,10 @@ class RtpRtcp : public Module {
*/
virtual void SetSequenceNumber(uint16_t seq) = 0;
- // Returns true if the ssrc matched this module, false otherwise.
- virtual bool SetRtpStateForSsrc(uint32_t ssrc,
- const RtpState& rtp_state) = 0;
- virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) = 0;
+ virtual void SetRtpState(const RtpState& rtp_state) = 0;
+ virtual void SetRtxState(const RtpState& rtp_state) = 0;
+ virtual RtpState GetRtpState() const = 0;
+ virtual RtpState GetRtxState() const = 0;
/*
* Get SSRC
@@ -539,6 +540,9 @@ class RtpRtcp : public Module {
/*
* Send NACK for the packets specified.
+ *
+ * Note: This assumes the caller keeps track of timing and doesn't rely on
+ * the RTP module to do this.
*/
virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
@@ -594,12 +598,6 @@ class RtpRtcp : public Module {
*
* return -1 on failure else 0
*/
- // DEPRECATED. Use SendREDPayloadType below that takes output parameter
- // by pointer instead of by reference.
- // TODO(danilchap): Remove this when all callers have been updated.
- int32_t SendREDPayloadType(int8_t& payloadType) const { // NOLINT
- return SendREDPayloadType(&payloadType);
- }
virtual int32_t SendREDPayloadType(int8_t* payload_type) const = 0;
/*
* Store the audio level in dBov for header-extension-for-audio-level-
@@ -632,14 +630,6 @@ class RtpRtcp : public Module {
/*
* Get generic FEC setting
*/
- // DEPRECATED. Use GenericFECStatus below that takes output parameters
- // by pointers instead of by references.
- // TODO(danilchap): Remove this when all callers have been updated.
- void GenericFECStatus(bool& enable, // NOLINT
- uint8_t& payloadTypeRED, // NOLINT
- uint8_t& payloadTypeFEC) { // NOLINT
- GenericFECStatus(&enable, &payloadTypeRED, &payloadTypeFEC);
- }
virtual void GenericFECStatus(bool* enable,
uint8_t* payload_type_red,
uint8_t* payload_type_fec) = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 9acef79f383..8dc8b0651d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -99,8 +99,6 @@ enum KeyFrameRequestMethod { kKeyFrameReqPliRtcp, kKeyFrameReqFirRtcp };
enum RtpRtcpPacketType { kPacketRtp = 0, kPacketKeepAlive = 1 };
-enum NACKMethod { kNackOff = 0, kNackRtcp = 2 };
-
enum RetransmissionMode : uint8_t {
kRetransmitOff = 0x0,
kRetransmitFECPackets = 0x1,
@@ -247,32 +245,28 @@ class RtcpBandwidthObserver {
struct PacketInfo {
PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
- : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0, false) {}
+ : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0) {}
PacketInfo(int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: PacketInfo(-1,
arrival_time_ms,
send_time_ms,
sequence_number,
- payload_size,
- was_paced) {}
+ payload_size) {}
PacketInfo(int64_t creation_time_ms,
int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: creation_time_ms(creation_time_ms),
arrival_time_ms(arrival_time_ms),
send_time_ms(send_time_ms),
sequence_number(sequence_number),
- payload_size(payload_size),
- was_paced(was_paced) {}
+ payload_size(payload_size) {}
// Time corresponding to when this object was created.
int64_t creation_time_ms;
@@ -287,8 +281,6 @@ struct PacketInfo {
uint16_t sequence_number;
// Size of the packet excluding RTP headers.
size_t payload_size;
- // True if the packet was paced out by the pacer.
- bool was_paced;
};
class TransportFeedbackObserver {
@@ -298,9 +290,7 @@ class TransportFeedbackObserver {
// Note: Transport-wide sequence number as sequence number. Arrival time
// must be set to 0.
- virtual void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) = 0;
+ virtual void AddPacket(uint16_t sequence_number, size_t length) = 0;
virtual void OnTransportFeedback(const rtcp::TransportFeedback& feedback) = 0;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 9991aa21108..bf5e9369215 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -85,12 +85,12 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD0(StartTimestamp,
uint32_t());
MOCK_METHOD1(SetStartTimestamp, void(const uint32_t timestamp));
- MOCK_CONST_METHOD0(SequenceNumber,
- uint16_t());
+ MOCK_CONST_METHOD0(SequenceNumber, uint16_t());
MOCK_METHOD1(SetSequenceNumber, void(const uint16_t seq));
- MOCK_METHOD2(SetRtpStateForSsrc,
- bool(uint32_t ssrc, const RtpState& rtp_state));
- MOCK_METHOD2(GetRtpStateForSsrc, bool(uint32_t ssrc, RtpState* rtp_state));
+ MOCK_METHOD1(SetRtpState, void(const RtpState& rtp_state));
+ MOCK_METHOD1(SetRtxState, void(const RtpState& rtp_state));
+ MOCK_CONST_METHOD0(GetRtpState, RtpState());
+ MOCK_CONST_METHOD0(GetRtxState, RtpState());
MOCK_CONST_METHOD0(SSRC,
uint32_t());
MOCK_METHOD1(SetSSRC,
@@ -202,10 +202,6 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_METHOD1(SetTMMBRStatus, void(const bool enable));
MOCK_METHOD1(OnBandwidthEstimateUpdate,
void(uint16_t bandWidthKbit));
- MOCK_CONST_METHOD0(NACK,
- NACKMethod());
- MOCK_METHOD2(SetNACKStatus,
- int32_t(const NACKMethod method, int oldestSequenceNumberToNack));
MOCK_CONST_METHOD0(SelectiveRetransmissions,
int());
MOCK_METHOD1(SetSelectiveRetransmissions,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi b/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
index 23a64b752f7..3f1e935b2a6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
@@ -35,10 +35,6 @@
'source/receive_statistics_impl.cc',
'source/receive_statistics_impl.h',
'source/remote_ntp_time_estimator.cc',
- 'source/rtp_header_parser.cc',
- 'source/rtp_rtcp_config.h',
- 'source/rtp_rtcp_impl.cc',
- 'source/rtp_rtcp_impl.h',
'source/rtcp_packet.cc',
'source/rtcp_packet.h',
'source/rtcp_packet/app.cc',
@@ -103,8 +99,18 @@
'source/rtcp_utility.h',
'source/rtp_header_extension.cc',
'source/rtp_header_extension.h',
+ 'source/rtp_header_extensions.cc',
+ 'source/rtp_header_extensions.h',
+ 'source/rtp_header_parser.cc',
+ 'source/rtp_packet.cc',
+ 'source/rtp_packet.h',
+ 'source/rtp_packet_received.h',
+ 'source/rtp_packet_to_send.h',
'source/rtp_receiver_impl.cc',
'source/rtp_receiver_impl.h',
+ 'source/rtp_rtcp_config.h',
+ 'source/rtp_rtcp_impl.cc',
+ 'source/rtp_rtcp_impl.h',
'source/rtp_sender.cc',
'source/rtp_sender.h',
'source/rtp_utility.cc',
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
index 4e9fc72c1f8..49a23592bfe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
@@ -11,13 +11,11 @@
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
Bitrate::Bitrate(Clock* clock, Observer* observer)
: clock_(clock),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
packet_rate_(0),
bitrate_(0),
bitrate_next_idx_(0),
@@ -33,23 +31,23 @@ Bitrate::Bitrate(Clock* clock, Observer* observer)
Bitrate::~Bitrate() {}
void Bitrate::Update(const size_t bytes) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
bytes_count_ += bytes;
packet_count_++;
}
uint32_t Bitrate::PacketRate() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return packet_rate_;
}
uint32_t Bitrate::BitrateLast() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return bitrate_;
}
uint32_t Bitrate::BitrateNow() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
int64_t now = clock_->TimeInMilliseconds();
int64_t diff_ms = now - time_last_rate_update_;
@@ -67,7 +65,7 @@ uint32_t Bitrate::BitrateNow() const {
}
int64_t Bitrate::time_last_rate_update() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return time_last_rate_update_;
}
@@ -75,7 +73,7 @@ int64_t Bitrate::time_last_rate_update() const {
void Bitrate::Process() {
BitrateStatistics stats;
{
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
int64_t now = clock_->CurrentNtpInMilliseconds();
int64_t diff_ms = now - time_last_rate_update_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
index 393d05d3e3e..7aaaead42d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
@@ -15,7 +15,7 @@
#include <list>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -23,7 +23,6 @@
namespace webrtc {
class Clock;
-class CriticalSectionWrapper;
class Bitrate {
public:
@@ -60,7 +59,7 @@ class Bitrate {
Clock* clock_;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ rtc::CriticalSection crit_;
uint32_t packet_rate_;
uint32_t bitrate_;
uint8_t bitrate_next_idx_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
index ab21b8704a5..81e8b5926e2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -13,20 +13,16 @@
#include <string.h>
namespace webrtc {
-DTMFqueue::DTMFqueue()
- : dtmf_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- next_empty_index_(0) {
+DTMFqueue::DTMFqueue() : next_empty_index_(0) {
memset(dtmf_key_, 0, sizeof(dtmf_key_));
memset(dtmf_length, 0, sizeof(dtmf_length));
memset(dtmf_level_, 0, sizeof(dtmf_level_));
}
-DTMFqueue::~DTMFqueue() {
- delete dtmf_critsect_;
-}
+DTMFqueue::~DTMFqueue() {}
int32_t DTMFqueue::AddDTMF(uint8_t key, uint16_t len, uint8_t level) {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
if (next_empty_index_ >= DTMF_OUTBAND_MAX) {
return -1;
@@ -40,7 +36,7 @@ int32_t DTMFqueue::AddDTMF(uint8_t key, uint16_t len, uint8_t level) {
}
int8_t DTMFqueue::NextDTMF(uint8_t* dtmf_key, uint16_t* len, uint8_t* level) {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
if (next_empty_index_ == 0)
return -1;
@@ -60,12 +56,12 @@ int8_t DTMFqueue::NextDTMF(uint8_t* dtmf_key, uint16_t* len, uint8_t* level) {
}
bool DTMFqueue::PendingDTMF() {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
return next_empty_index_ > 0;
}
void DTMFqueue::ResetDTMF() {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
next_empty_index_ = 0;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
index d1b3f5667cf..c0e616f9827 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -27,7 +27,7 @@ class DTMFqueue {
void ResetDTMF();
private:
- CriticalSectionWrapper* dtmf_critsect_;
+ rtc::CriticalSection dtmf_critsect_;
uint8_t next_empty_index_;
uint8_t dtmf_key_[DTMF_OUTBAND_MAX];
uint16_t dtmf_length[DTMF_OUTBAND_MAX];
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 2109574e396..83bd2849df4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -12,11 +12,11 @@
#include <assert.h>
+#include <memory>
+
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
// RFC 5109
namespace webrtc {
@@ -26,8 +26,7 @@ FecReceiver* FecReceiver::Create(RtpData* callback) {
}
FecReceiverImpl::FecReceiverImpl(RtpData* callback)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- recovered_packet_callback_(callback),
+ : recovered_packet_callback_(callback),
fec_(new ForwardErrorCorrection()) {}
FecReceiverImpl::~FecReceiverImpl() {
@@ -42,7 +41,7 @@ FecReceiverImpl::~FecReceiverImpl() {
}
FecPacketCounter FecReceiverImpl::GetPacketCounter() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return packet_counter_;
}
@@ -77,7 +76,7 @@ FecPacketCounter FecReceiverImpl::GetPacketCounter() const {
int32_t FecReceiverImpl::AddReceivedRedPacket(
const RTPHeader& header, const uint8_t* incoming_rtp_packet,
size_t packet_length, uint8_t ulpfec_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
uint8_t REDHeaderLength = 1;
size_t payload_data_length = packet_length - header.headerLength;
@@ -89,7 +88,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
// Add to list without RED header, aka a virtual RTP packet
// we remove the RED header
- rtc::scoped_ptr<ForwardErrorCorrection::ReceivedPacket> received_packet(
+ std::unique_ptr<ForwardErrorCorrection::ReceivedPacket> received_packet(
new ForwardErrorCorrection::ReceivedPacket);
received_packet->pkt = new ForwardErrorCorrection::Packet;
@@ -137,7 +136,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
}
++packet_counter_.num_packets;
- rtc::scoped_ptr<ForwardErrorCorrection::ReceivedPacket>
+ std::unique_ptr<ForwardErrorCorrection::ReceivedPacket>
second_received_packet;
if (blockLength > 0) {
// handle block length, split into 2 packets
@@ -219,21 +218,21 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
}
int32_t FecReceiverImpl::ProcessReceivedFec() {
- crit_sect_->Enter();
+ crit_sect_.Enter();
if (!received_packet_list_.empty()) {
// Send received media packet to VCM.
if (!received_packet_list_.front()->is_fec) {
ForwardErrorCorrection::Packet* packet =
received_packet_list_.front()->pkt;
- crit_sect_->Leave();
+ crit_sect_.Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
- crit_sect_->Enter();
+ crit_sect_.Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
- crit_sect_->Leave();
+ crit_sect_.Leave();
return -1;
}
assert(received_packet_list_.empty());
@@ -246,15 +245,15 @@ int32_t FecReceiverImpl::ProcessReceivedFec() {
continue;
ForwardErrorCorrection::Packet* packet = (*it)->pkt;
++packet_counter_.num_recovered_packets;
- crit_sect_->Leave();
+ crit_sect_.Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
- crit_sect_->Enter();
+ crit_sect_.Enter();
(*it)->returned = true;
}
- crit_sect_->Leave();
+ crit_sect_.Leave();
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
index 6a63813f408..0ebca9bce2d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
@@ -13,7 +13,7 @@
// This header is included to get the nested declaration of Packet structure.
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
@@ -21,8 +21,6 @@
namespace webrtc {
-class CriticalSectionWrapper;
-
class FecReceiverImpl : public FecReceiver {
public:
explicit FecReceiverImpl(RtpData* callback);
@@ -38,7 +36,7 @@ class FecReceiverImpl : public FecReceiver {
FecPacketCounter GetPacketCounter() const override;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
RtpData* recovered_packet_callback_;
ForwardErrorCorrection* fec_;
// TODO(holmer): In the current version received_packet_list_ is never more
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
index ee8f408720e..cd60d9b094c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
@@ -11,10 +11,10 @@
#include <string.h>
#include <list>
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
@@ -92,9 +92,9 @@ class ReceiverFecTest : public ::testing::Test {
uint8_t ulpfec_payload_type);
MockRtpData rtp_data_callback_;
- rtc::scoped_ptr<ForwardErrorCorrection> fec_;
- rtc::scoped_ptr<FecReceiver> receiver_fec_;
- rtc::scoped_ptr<FrameGenerator> generator_;
+ std::unique_ptr<ForwardErrorCorrection> fec_;
+ std::unique_ptr<FecReceiver> receiver_fec_;
+ std::unique_ptr<FrameGenerator> generator_;
};
void DeletePackets(std::list<Packet*>* packets) {
@@ -415,12 +415,12 @@ void ReceiverFecTest::SurvivesMaliciousPacket(const uint8_t* data,
size_t length,
uint8_t ulpfec_payload_type) {
webrtc::RTPHeader header;
- rtc::scoped_ptr<webrtc::RtpHeaderParser> parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(parser->Parse(data, length, &header));
webrtc::NullRtpData null_callback;
- rtc::scoped_ptr<webrtc::FecReceiver> receiver_fec(
+ std::unique_ptr<webrtc::FecReceiver> receiver_fec(
webrtc::FecReceiver::Create(&null_callback));
receiver_fec->AddReceivedRedPacket(header, data, length, ulpfec_payload_type);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index b85d813790d..623c658a174 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -15,6 +15,7 @@
#include <algorithm>
#include <iterator>
+#include <memory>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -163,7 +164,7 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
// -- Generate packet masks --
// Always allocate space for a large mask.
- rtc::scoped_ptr<uint8_t[]> packet_mask(
+ std::unique_ptr<uint8_t[]> packet_mask(
new uint8_t[num_fec_packets * kMaskSizeLBitSet]);
memset(packet_mask.get(), 0, num_fec_packets * num_mask_bytes);
internal::GeneratePacketMasks(num_media_packets, num_fec_packets,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
index 6d8b4074598..e23a3fa629b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
@@ -9,13 +9,13 @@
*/
#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include <memory>
#include <vector>
#include "webrtc/base/bitbuffer.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
namespace {
@@ -103,7 +103,7 @@ bool H264BitstreamParser::ParseSpsNalu(const uint8_t* sps, size_t length) {
sps_parsed_ = false;
// Parse out the SPS RBSP. It should be small, so it's ok that we create a
// copy. We'll eventually write this back.
- rtc::scoped_ptr<rtc::ByteBufferWriter> sps_rbsp(
+ std::unique_ptr<rtc::ByteBufferWriter> sps_rbsp(
ParseRbsp(sps + kNaluHeaderAndTypeSize, length - kNaluHeaderAndTypeSize));
rtc::BitBuffer sps_parser(reinterpret_cast<const uint8_t*>(sps_rbsp->Data()),
sps_rbsp->Length());
@@ -209,7 +209,7 @@ bool H264BitstreamParser::ParsePpsNalu(const uint8_t* pps, size_t length) {
// We're starting a new stream, so reset picture type rewriting values.
pps_ = PpsState();
pps_parsed_ = false;
- rtc::scoped_ptr<rtc::ByteBufferWriter> buffer(
+ std::unique_ptr<rtc::ByteBufferWriter> buffer(
ParseRbsp(pps + kNaluHeaderAndTypeSize, length - kNaluHeaderAndTypeSize));
rtc::BitBuffer parser(reinterpret_cast<const uint8_t*>(buffer->Data()),
buffer->Length());
@@ -317,7 +317,7 @@ bool H264BitstreamParser::ParseNonParameterSetNalu(const uint8_t* source,
RTC_CHECK(sps_parsed_);
RTC_CHECK(pps_parsed_);
last_slice_qp_delta_parsed_ = false;
- rtc::scoped_ptr<rtc::ByteBufferWriter> slice_rbsp(ParseRbsp(
+ std::unique_ptr<rtc::ByteBufferWriter> slice_rbsp(ParseRbsp(
source + kNaluHeaderAndTypeSize, source_length - kNaluHeaderAndTypeSize));
rtc::BitBuffer slice_reader(
reinterpret_cast<const uint8_t*>(slice_rbsp->Data()),
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index e19c31bfece..9a7b9d3f7ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -11,10 +11,10 @@
#include <algorithm>
#include <iterator>
#include <list>
+#include <memory>
#include <set>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
@@ -105,7 +105,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
size_t packet_length = len;
uint8_t restored_packet[1500];
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
if (!parser->Parse(ptr, len, &header)) {
return false;
}
@@ -191,7 +191,6 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
rtp_rtcp_module_->SetSSRC(kTestSsrc);
rtp_rtcp_module_->SetRTCPStatus(RtcpMode::kCompound);
- rtp_receiver_->SetNACKStatus(kNackRtcp);
rtp_rtcp_module_->SetStorePacketsStatus(true, 600);
EXPECT_EQ(0, rtp_rtcp_module_->SetSendingStatus(true));
rtp_rtcp_module_->SetSequenceNumber(kTestSequenceNumber);
@@ -279,11 +278,11 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
void TearDown() override { delete rtp_rtcp_module_; }
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* rtp_rtcp_module_;
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback_;
RtxLoopBackTransport transport_;
VerifyingRtxReceiver receiver_;
uint8_t payload_data[65000];
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
index 69a28ed4dbc..c7ea19db586 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
@@ -157,12 +157,11 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
(ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) {
assert(num_first_partition_ <=
static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets));
- int ret = fec_->GenerateFEC(media_packets_fec_,
- params_.fec_rate,
- num_first_partition_,
- params_.use_uep_protection,
- params_.fec_mask_type,
- &fec_packets_);
+ // TODO(pbos): Consider whether unequal protection should be enabled or not,
+ // it is currently always disabled.
+ int ret = fec_->GenerateFEC(media_packets_fec_, params_.fec_rate,
+ num_first_partition_, false,
+ params_.fec_mask_type, &fec_packets_);
if (fec_packets_.empty()) {
num_frames_ = 0;
DeletePackets();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
index fad0f502f59..ec5228afd5b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <list>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -78,7 +79,7 @@ TEST_F(ProducerFecTest, NoEmptyFecWithSeqNumGaps) {
protected_packets.push_back({12, 3, 54, 0});
protected_packets.push_back({21, 0, 55, 0});
protected_packets.push_back({13, 3, 57, 1});
- FecProtectionParams params = {117, 0, 3, kFecMaskBursty};
+ FecProtectionParams params = {117, 3, kFecMaskBursty};
producer_->SetFecParameters(&params, 0);
uint8_t packet[28] = {0};
for (Packet p : protected_packets) {
@@ -111,7 +112,7 @@ TEST_F(ProducerFecTest, OneFrameFec) {
// of packets is within |kMaxExcessOverhead|, and (2) the total number of
// media packets for 1 frame is at least |minimum_media_packets_fec_|.
const int kNumPackets = 4;
- FecProtectionParams params = {15, false, 3};
+ FecProtectionParams params = {15, 3, kFecMaskRandom};
std::list<test::RawRtpPacket*> rtp_packets;
generator_->NewFrame(kNumPackets);
producer_->SetFecParameters(&params, 0); // Expecting one FEC packet.
@@ -152,7 +153,7 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
const int kNumPackets = 2;
const int kNumFrames = 2;
- FecProtectionParams params = {15, 0, 3};
+ FecProtectionParams params = {15, 3, kFecMaskRandom};
std::list<test::RawRtpPacket*> rtp_packets;
producer_->SetFecParameters(&params, 0); // Expecting one FEC packet.
uint32_t last_timestamp = 0;
@@ -188,7 +189,7 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
TEST_F(ProducerFecTest, BuildRedPacket) {
generator_->NewFrame(1);
test::RawRtpPacket* packet = generator_->NextPacket(0, 10);
- rtc::scoped_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
+ std::unique_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
packet->data, packet->length - kRtpHeaderSize, kRtpHeaderSize,
kRedPayloadType));
EXPECT_EQ(packet->length + 1, red_packet->length());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 022fc9610f4..932be1bb9e1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -12,10 +12,10 @@
#include <math.h>
-#include "webrtc/base/scoped_ptr.h"
+#include <cstdlib>
+
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/time_util.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -29,7 +29,6 @@ StreamStatisticianImpl::StreamStatisticianImpl(
RtcpStatisticsCallback* rtcp_callback,
StreamDataCountersCallback* rtp_callback)
: clock_(clock),
- stream_lock_(CriticalSectionWrapper::CreateCriticalSection()),
incoming_bitrate_(clock, NULL),
ssrc_(0),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
@@ -59,7 +58,7 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
size_t packet_length,
bool retransmitted) {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
bool in_order = InOrderPacketInternal(header.sequenceNumber);
ssrc_ = header.ssrc;
incoming_bitrate_.Update(packet_length);
@@ -116,7 +115,7 @@ void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
(header.timestamp - last_received_timestamp_);
- time_diff_samples = abs(time_diff_samples);
+ time_diff_samples = std::abs(time_diff_samples);
// lib_jingle sometimes deliver crazy jumps in TS for the same stream.
// If this happens, don't update jitter value. Use 5 secs video frequency
@@ -136,7 +135,7 @@ void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
(last_received_timestamp_ +
last_received_transmission_time_offset_));
- time_diff_samples_ext = abs(time_diff_samples_ext);
+ time_diff_samples_ext = std::abs(time_diff_samples_ext);
if (time_diff_samples_ext < 450000) {
int32_t jitter_diffQ4TransmissionTimeOffset =
@@ -150,7 +149,7 @@ void StreamStatisticianImpl::NotifyRtpCallback() {
StreamDataCounters data;
uint32_t ssrc;
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
data = receive_counters_;
ssrc = ssrc_;
}
@@ -161,7 +160,7 @@ void StreamStatisticianImpl::NotifyRtcpCallback() {
RtcpStatistics data;
uint32_t ssrc;
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
data = last_reported_statistics_;
ssrc = ssrc_;
}
@@ -171,7 +170,7 @@ void StreamStatisticianImpl::NotifyRtcpCallback() {
void StreamStatisticianImpl::FecPacketReceived(const RTPHeader& header,
size_t packet_length) {
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
receive_counters_.fec.AddPacket(packet_length, header);
}
NotifyRtpCallback();
@@ -179,14 +178,14 @@ void StreamStatisticianImpl::FecPacketReceived(const RTPHeader& header,
void StreamStatisticianImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
max_reordering_threshold_ = max_reordering_threshold;
}
bool StreamStatisticianImpl::GetStatistics(RtcpStatistics* statistics,
bool reset) {
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (received_seq_first_ == 0 &&
receive_counters_.transmitted.payload_bytes == 0) {
// We have not received anything.
@@ -282,7 +281,7 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
void StreamStatisticianImpl::GetDataCounters(
size_t* bytes_received, uint32_t* packets_received) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (bytes_received) {
*bytes_received = receive_counters_.transmitted.payload_bytes +
receive_counters_.transmitted.header_bytes +
@@ -295,30 +294,30 @@ void StreamStatisticianImpl::GetDataCounters(
void StreamStatisticianImpl::GetReceiveStreamDataCounters(
StreamDataCounters* data_counters) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
*data_counters = receive_counters_;
}
uint32_t StreamStatisticianImpl::BitrateReceived() const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
return incoming_bitrate_.BitrateNow();
}
void StreamStatisticianImpl::ProcessBitrate() {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
incoming_bitrate_.Process();
}
void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
uint32_t* frac) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
*secs = last_receive_time_ntp_.seconds();
*frac = last_receive_time_ntp_.fractions();
}
bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
const RTPHeader& header, int64_t min_rtt) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (InOrderPacketInternal(header.sequenceNumber)) {
return false;
}
@@ -352,7 +351,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
}
bool StreamStatisticianImpl::IsPacketInOrder(uint16_t sequence_number) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
return InOrderPacketInternal(sequence_number);
}
@@ -377,7 +376,6 @@ ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: clock_(clock),
- receive_statistics_lock_(CriticalSectionWrapper::CreateCriticalSection()),
last_rate_update_ms_(0),
rtcp_stats_callback_(NULL),
rtp_stats_callback_(NULL) {}
@@ -394,7 +392,7 @@ void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
bool retransmitted) {
StreamStatisticianImpl* impl;
{
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
if (it != statisticians_.end()) {
impl = it->second;
@@ -412,7 +410,7 @@ void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
void ReceiveStatisticsImpl::FecPacketReceived(const RTPHeader& header,
size_t packet_length) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
// Ignore FEC if it is the first packet.
if (it != statisticians_.end()) {
@@ -421,7 +419,7 @@ void ReceiveStatisticsImpl::FecPacketReceived(const RTPHeader& header,
}
StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianMap active_statisticians;
for (StatisticianImplMap::const_iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
@@ -438,7 +436,7 @@ StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
uint32_t ssrc) const {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::const_iterator it = statisticians_.find(ssrc);
if (it == statisticians_.end())
return NULL;
@@ -447,7 +445,7 @@ StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->SetMaxReorderingThreshold(max_reordering_threshold);
@@ -455,7 +453,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
}
void ReceiveStatisticsImpl::Process() {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->ProcessBitrate();
@@ -464,7 +462,7 @@ void ReceiveStatisticsImpl::Process() {
}
int64_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
int64_t time_since_last_update = clock_->TimeInMilliseconds() -
last_rate_update_ms_;
return std::max<int64_t>(
@@ -473,7 +471,7 @@ int64_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (callback != NULL)
assert(rtcp_stats_callback_ == NULL);
rtcp_stats_callback_ = callback;
@@ -481,20 +479,20 @@ void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
void ReceiveStatisticsImpl::StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtcp_stats_callback_)
rtcp_stats_callback_->StatisticsUpdated(statistics, ssrc);
}
void ReceiveStatisticsImpl::CNameChanged(const char* cname, uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtcp_stats_callback_)
rtcp_stats_callback_->CNameChanged(cname, ssrc);
}
void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (callback != NULL)
assert(rtp_stats_callback_ == NULL);
rtp_stats_callback_ = callback;
@@ -502,7 +500,7 @@ void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
void ReceiveStatisticsImpl::DataCountersUpdated(const StreamDataCounters& stats,
uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtp_stats_callback_) {
rtp_stats_callback_->DataCountersUpdated(stats, ssrc);
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
index 6da8334da6e..39679673d08 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -16,15 +16,12 @@
#include <algorithm>
#include <map>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/ntp_time.h"
namespace webrtc {
-class CriticalSectionWrapper;
-
class StreamStatisticianImpl : public StreamStatistician {
public:
StreamStatisticianImpl(Clock* clock,
@@ -57,11 +54,11 @@ class StreamStatisticianImpl : public StreamStatistician {
void UpdateCounters(const RTPHeader& rtp_header,
size_t packet_length,
bool retransmitted);
- void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_.get());
- void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_.get());
+ void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_);
+ void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_);
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> stream_lock_;
+ rtc::CriticalSection stream_lock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
int max_reordering_threshold_; // In number of packets or sequence numbers.
@@ -131,7 +128,7 @@ class ReceiveStatisticsImpl : public ReceiveStatistics,
typedef std::map<uint32_t, StreamStatisticianImpl*> StatisticianImplMap;
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> receive_statistics_lock_;
+ rtc::CriticalSection receive_statistics_lock_;
int64_t last_rate_update_ms_;
StatisticianImplMap statisticians_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
index c265c17c04b..f6cbe74e478 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -36,7 +37,7 @@ class ReceiveStatisticsTest : public ::testing::Test {
protected:
SimulatedClock clock_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPHeader header1_;
RTPHeader header2_;
};
@@ -156,8 +157,8 @@ TEST_F(ReceiveStatisticsTest, RtcpCallbacks) {
: RtcpStatisticsCallback(), num_calls_(0), ssrc_(0), stats_() {}
virtual ~TestCallback() {}
- virtual void StatisticsUpdated(const RtcpStatistics& statistics,
- uint32_t ssrc) {
+ void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) override {
ssrc_ = ssrc;
stats_ = statistics;
++num_calls_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
index 183076ff591..bbfb52c6cc9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
@@ -74,13 +76,13 @@ class RtcpFormatRembTest : public ::testing::Test {
OverUseDetectorOptions over_use_detector_options_;
Clock* system_clock_;
ModuleRtpRtcpImpl* dummy_rtp_rtcp_impl_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTCPSender* rtcp_sender_;
RTCPReceiver* rtcp_receiver_;
TestTransport* test_transport_;
test::NullTransport null_transport_;
MockRemoteBitrateObserver remote_bitrate_observer_;
- rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
};
void RtcpFormatRembTest::SetUp() {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
index 16bd3fc2a2d..f5a885cfdd7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_APP_H_
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
index af3fbacc809..ad28cb39e7b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
index 4fb92facc73..06b67c375cf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
index 49de7be1a80..34eb57f3484 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
index a85576db2da..d7e715bc9ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
index fb2be113a2b..b6acae5aab1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
index 10fafd229da..64caf1b5c41 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
@@ -11,6 +11,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
index 866eb2ce529..8568e7327c7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RAPID_RESYNC_REQUEST_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
index 237d923cd7a..0630adbac24 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
@@ -49,7 +50,7 @@ class ReceiverReport : public RtcpPacket {
static const size_t kRrBaseLength = 4;
static const size_t kMaxNumberOfReportBlocks = 0x1F;
- size_t BlockLength() const {
+ size_t BlockLength() const override {
return kHeaderLength + kRrBaseLength +
report_blocks_.size() * ReportBlock::kLength;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
index 3b33982a838..2f59fbbd55f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
@@ -13,11 +13,11 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-
-using webrtc::RTCPUtility::RtcpCommonHeader;
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/common_header.h"
namespace webrtc {
namespace rtcp {
+constexpr uint8_t Remb::kFeedbackMessageType;
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
//
// 0 1 2 3
@@ -36,32 +36,39 @@ namespace rtcp {
// 16 | SSRC feedback |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// : ... :
-bool Remb::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
- RTC_DCHECK(header.packet_type == kPacketType);
- RTC_DCHECK(header.count_or_format == kFeedbackMessageType);
+bool Remb::Parse(const CommonHeader& packet) {
+ RTC_DCHECK(packet.type() == kPacketType);
+ RTC_DCHECK_EQ(packet.fmt(), kFeedbackMessageType);
- if (header.payload_size_bytes < 16) {
- LOG(LS_WARNING) << "Payload length " << header.payload_size_bytes
+ if (packet.payload_size_bytes() < 16) {
+ LOG(LS_WARNING) << "Payload length " << packet.payload_size_bytes()
<< " is too small for Remb packet.";
return false;
}
+ const uint8_t* const payload = packet.payload();
if (kUniqueIdentifier != ByteReader<uint32_t>::ReadBigEndian(&payload[8])) {
LOG(LS_WARNING) << "REMB identifier not found, not a REMB packet.";
return false;
}
uint8_t number_of_ssrcs = payload[12];
- if (header.payload_size_bytes !=
+ if (packet.payload_size_bytes() !=
kCommonFeedbackLength + (2 + number_of_ssrcs) * 4) {
- LOG(LS_WARNING) << "Payload size " << header.payload_size_bytes
+ LOG(LS_WARNING) << "Payload size " << packet.payload_size_bytes()
<< " does not match " << number_of_ssrcs << " ssrcs.";
return false;
}
ParseCommonFeedback(payload);
uint8_t exponenta = payload[13] >> 2;
- uint32_t mantissa = (static_cast<uint32_t>(payload[13] & 0x03) << 16) |
+ uint64_t mantissa = (static_cast<uint32_t>(payload[13] & 0x03) << 16) |
ByteReader<uint16_t>::ReadBigEndian(&payload[14]);
bitrate_bps_ = (mantissa << exponenta);
+ bool shift_overflow = (bitrate_bps_ >> exponenta) != mantissa;
+ if (shift_overflow) {
+ LOG(LS_ERROR) << "Invalid remb bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exponenta);
+ return false;
+ }
const uint8_t* next_ssrc = payload + 16;
ssrcs_.clear();
@@ -111,7 +118,7 @@ bool Remb::Create(uint8_t* packet,
ByteWriter<uint32_t>::WriteBigEndian(packet + *index, kUniqueIdentifier);
*index += sizeof(uint32_t);
const uint32_t kMaxMantissa = 0x3ffff; // 18 bits.
- uint32_t mantissa = bitrate_bps_;
+ uint64_t mantissa = bitrate_bps_;
uint8_t exponenta = 0;
while (mantissa > kMaxMantissa) {
mantissa >>= 1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
index d58f052b145..9f10921c994 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
@@ -14,28 +14,29 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
namespace webrtc {
namespace rtcp {
+class CommonHeader;
+
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
class Remb : public Psfb {
public:
- static const uint8_t kFeedbackMessageType = 15;
+ static constexpr uint8_t kFeedbackMessageType = 15;
Remb() : bitrate_bps_(0) {}
~Remb() override {}
// Parse assumes header is already parsed and validated.
- bool Parse(const RTCPUtility::RtcpCommonHeader& header,
- const uint8_t* payload); // Size of the payload is in the header.
+ bool Parse(const CommonHeader& packet);
bool AppliesTo(uint32_t ssrc);
bool AppliesToMany(const std::vector<uint32_t>& ssrcs);
- void WithBitrateBps(uint32_t bitrate_bps) { bitrate_bps_ = bitrate_bps; }
+ void WithBitrateBps(uint64_t bitrate_bps) { bitrate_bps_ = bitrate_bps; }
- uint32_t bitrate_bps() const { return bitrate_bps_; }
+ uint64_t bitrate_bps() const { return bitrate_bps_; }
const std::vector<uint32_t>& ssrcs() const { return ssrcs_; }
protected:
@@ -49,14 +50,14 @@ class Remb : public Psfb {
}
private:
- static const size_t kMaxNumberOfSsrcs = 0xff;
- static const uint32_t kUniqueIdentifier = 0x52454D42; // 'R' 'E' 'M' 'B'.
+ static constexpr size_t kMaxNumberOfSsrcs = 0xff;
+ static constexpr uint32_t kUniqueIdentifier = 0x52454D42; // 'R' 'E' 'M' 'B'.
// Media ssrc is unused, shadow base class setter and getter.
void To(uint32_t);
uint32_t media_ssrc() const;
- uint32_t bitrate_bps_;
+ uint64_t bitrate_bps_;
std::vector<uint32_t> ssrcs_;
RTC_DISALLOW_COPY_AND_ASSIGN(Remb);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
index ee06972e2b0..d504143f6f6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
@@ -12,32 +12,25 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/rtcp_packet_parser.h"
using testing::ElementsAreArray;
using testing::IsEmpty;
using testing::make_tuple;
using webrtc::rtcp::Remb;
-using webrtc::RTCPUtility::RtcpCommonHeader;
-using webrtc::RTCPUtility::RtcpParseCommonHeader;
namespace webrtc {
namespace {
-
const uint32_t kSenderSsrc = 0x12345678;
const uint32_t kRemoteSsrcs[] = {0x23456789, 0x2345678a, 0x2345678b};
const uint32_t kBitrateBps = 0x3fb93 * 2; // 522022;
+const uint64_t kBitrateBps64bit = 0x3fb93ULL << 30;
const uint8_t kPacket[] = {0x8f, 206, 0x00, 0x07, 0x12, 0x34, 0x56, 0x78,
0x00, 0x00, 0x00, 0x00, 'R', 'E', 'M', 'B',
0x03, 0x07, 0xfb, 0x93, 0x23, 0x45, 0x67, 0x89,
0x23, 0x45, 0x67, 0x8a, 0x23, 0x45, 0x67, 0x8b};
const size_t kPacketLength = sizeof(kPacket);
-
-bool ParseRemb(const uint8_t* buffer, size_t length, Remb* remb) {
- RtcpCommonHeader header;
- EXPECT_TRUE(RtcpParseCommonHeader(buffer, length, &header));
- EXPECT_EQ(length, header.BlockSize());
- return remb->Parse(header, buffer + RtcpCommonHeader::kHeaderSizeBytes);
-}
+} // namespace
TEST(RtcpPacketRembTest, Create) {
Remb remb;
@@ -55,7 +48,7 @@ TEST(RtcpPacketRembTest, Create) {
TEST(RtcpPacketRembTest, Parse) {
Remb remb;
- EXPECT_TRUE(ParseRemb(kPacket, kPacketLength, &remb));
+ EXPECT_TRUE(test::ParseSinglePacket(kPacket, &remb));
const Remb& parsed = remb;
EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
@@ -70,19 +63,31 @@ TEST(RtcpPacketRembTest, CreateAndParseWithoutSsrcs) {
rtc::Buffer packet = remb.Build();
Remb parsed;
- EXPECT_TRUE(ParseRemb(packet.data(), packet.size(), &parsed));
+ EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed));
EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
EXPECT_EQ(kBitrateBps, parsed.bitrate_bps());
EXPECT_THAT(parsed.ssrcs(), IsEmpty());
}
+TEST(RtcpPacketRembTest, CreateAndParse64bitBitrate) {
+ Remb remb;
+ remb.WithBitrateBps(kBitrateBps64bit);
+ rtc::Buffer packet = remb.Build();
+
+ Remb parsed;
+ EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed));
+ EXPECT_EQ(kBitrateBps64bit, parsed.bitrate_bps());
+}
+
TEST(RtcpPacketRembTest, ParseFailsOnTooSmallPacketToBeRemb) {
- uint8_t packet[kPacketLength];
- memcpy(packet, kPacket, kPacketLength);
- packet[3] = 3; // Make it too small.
+ // Make it too small.
+ constexpr size_t kTooSmallSize = (1 + 3) * 4;
+ uint8_t packet[kTooSmallSize];
+ memcpy(packet, kPacket, kTooSmallSize);
+ packet[3] = 3;
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, (1 + 3) * 4, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, ParseFailsWhenUniqueIdentifierIsNotRemb) {
@@ -91,7 +96,17 @@ TEST(RtcpPacketRembTest, ParseFailsWhenUniqueIdentifierIsNotRemb) {
packet[12] = 'N'; // Swap 'R' -> 'N' in the 'REMB' unique identifier.
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, kPacketLength, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
+}
+
+TEST(RtcpPacketRembTest, ParseFailsWhenBitrateDoNotFitIn64bits) {
+ uint8_t packet[kPacketLength];
+ memcpy(packet, kPacket, kPacketLength);
+ packet[17] |= 0xfc; // Set exponenta component to maximum of 63.
+ packet[19] |= 0x02; // Ensure mantissa is at least 2.
+
+ Remb remb;
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, ParseFailsWhenSsrcCountMismatchLength) {
@@ -100,7 +115,7 @@ TEST(RtcpPacketRembTest, ParseFailsWhenSsrcCountMismatchLength) {
packet[16]++; // Swap 3 -> 4 in the ssrcs count.
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, kPacketLength, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, TooManySsrcs) {
@@ -126,5 +141,5 @@ TEST(RtcpPacketRembTest, TooManySsrcsForBatchAssign) {
// But not for another one.
EXPECT_FALSE(remb.AppliesTo(kRemoteSsrc));
}
-} // namespace
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
index 1fa3352335e..7d4895b09cd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RPSI_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
index 5940edbb116..19d5b42b557 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
index e11bdb9a942..a544017f149 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "webrtc/system_wrappers/include/ntp_time.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
index 5d9e6c93e95..7b6b24f3b8d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
index 26a44082d35..c84d0dfe0b9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
index 4028563d071..15bfc5856fa 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
index 4ad49561b80..5cdaa3aaa47 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
@@ -651,10 +651,10 @@ bool TransportFeedback::Create(uint8_t* packet,
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// De-serialize packet.
-rtc::scoped_ptr<TransportFeedback> TransportFeedback::ParseFrom(
+std::unique_ptr<TransportFeedback> TransportFeedback::ParseFrom(
const uint8_t* buffer,
size_t length) {
- rtc::scoped_ptr<TransportFeedback> packet(new TransportFeedback());
+ std::unique_ptr<TransportFeedback> packet(new TransportFeedback());
if (length < kMinSizeBytes) {
LOG(LS_WARNING) << "Buffer too small (" << length
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
index ad6fd166f2b..7a74d7ffe83 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TRANSPORT_FEEDBACK_H_
#include <deque>
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
@@ -58,7 +59,7 @@ class TransportFeedback : public RtcpPacket {
static const uint8_t kFeedbackMessageType = 15; // TODO(sprang): IANA reg?
static const uint8_t kPayloadType = 205; // RTPFB, see RFC4585.
- static rtc::scoped_ptr<TransportFeedback> ParseFrom(const uint8_t* buffer,
+ static std::unique_ptr<TransportFeedback> ParseFrom(const uint8_t* buffer,
size_t length);
protected:
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
index 3615065351d..203d70fab15 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include <limits>
+#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
@@ -43,7 +44,7 @@ class FeedbackTester {
void WithInput(const uint16_t received_seq[],
const int64_t received_ts[],
uint16_t length) {
- rtc::scoped_ptr<int64_t[]> temp_deltas;
+ std::unique_ptr<int64_t[]> temp_deltas;
if (received_ts == nullptr) {
temp_deltas.reset(new int64_t[length]);
GenerateDeltas(received_seq, length, temp_deltas.get());
@@ -136,7 +137,7 @@ class FeedbackTester {
std::vector<int64_t> expected_deltas_;
size_t expected_size_;
int64_t default_delta_;
- rtc::scoped_ptr<TransportFeedback> feedback_;
+ std::unique_ptr<TransportFeedback> feedback_;
rtc::Buffer serialized_;
};
@@ -356,7 +357,7 @@ TEST(RtcpPacketTest, TransportFeedback_Aliasing) {
TEST(RtcpPacketTest, TransportFeedback_Limits) {
// Sequence number wrap above 0x8000.
- rtc::scoped_ptr<TransportFeedback> packet(new TransportFeedback());
+ std::unique_ptr<TransportFeedback> packet(new TransportFeedback());
packet->WithBase(0, 0);
EXPECT_TRUE(packet->WithReceivedPacket(0x8000, 1000));
@@ -446,7 +447,7 @@ TEST(RtcpPacketTest, TransportFeedback_Padding) {
&mod_buffer[2], ByteReader<uint16_t>::ReadBigEndian(&mod_buffer[2]) +
((kPaddingBytes + 3) / 4));
- rtc::scoped_ptr<TransportFeedback> parsed_packet(
+ std::unique_ptr<TransportFeedback> parsed_packet(
TransportFeedback::ParseFrom(mod_buffer, kExpectedSizeWithPadding));
ASSERT_TRUE(parsed_packet.get() != nullptr);
EXPECT_EQ(kExpectedSizeWords * 4, packet.size()); // Padding not included.
@@ -468,7 +469,7 @@ TEST(RtcpPacketTest, TransportFeedback_CorrectlySplitsVectorChunks) {
feedback.WithReceivedPacket(deltas, deltas * 1000 + kLargeTimeDelta);
rtc::Buffer serialized_packet = feedback.Build();
- rtc::scoped_ptr<TransportFeedback> deserialized_packet =
+ std::unique_ptr<TransportFeedback> deserialized_packet =
TransportFeedback::ParseFrom(serialized_packet.data(),
serialized_packet.size());
EXPECT_TRUE(deserialized_packet.get() != nullptr);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
index 1b5d4f32a62..58218ddbfd1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
@@ -6,8 +6,6 @@
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
- *
- * This file includes unit tests for the RtcpPacket.
*/
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index 0faf2a42575..3e8e47fbd37 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -49,13 +49,9 @@ RTCPReceiver::RTCPReceiver(
receiver_only_(receiver_only),
_lastReceived(0),
_rtpRtcp(*owner),
- _criticalSectionFeedbacks(
- CriticalSectionWrapper::CreateCriticalSection()),
_cbRtcpBandwidthObserver(rtcp_bandwidth_observer),
_cbRtcpIntraFrameObserver(rtcp_intra_frame_observer),
_cbTransportFeedbackObserver(transport_feedback_observer),
- _criticalSectionRTCPReceiver(
- CriticalSectionWrapper::CreateCriticalSection()),
main_ssrc_(0),
_remoteSSRC(0),
_remoteSenderInfo(),
@@ -76,9 +72,6 @@ RTCPReceiver::RTCPReceiver(
}
RTCPReceiver::~RTCPReceiver() {
- delete _criticalSectionRTCPReceiver;
- delete _criticalSectionFeedbacks;
-
ReportBlockMap::iterator it = _receivedReportBlockMap.begin();
for (; it != _receivedReportBlockMap.end(); ++it) {
ReportBlockInfoMap* info_map = &(it->second);
@@ -103,12 +96,12 @@ RTCPReceiver::~RTCPReceiver() {
}
int64_t RTCPReceiver::LastReceived() {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
return _lastReceived;
}
int64_t RTCPReceiver::LastReceivedReceiverReport() const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
int64_t last_received_rr = -1;
for (ReceivedInfoMap::const_iterator it = _receivedInfoMap.begin();
it != _receivedInfoMap.end(); ++it) {
@@ -120,7 +113,7 @@ int64_t RTCPReceiver::LastReceivedReceiverReport() const {
}
void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
// new SSRC reset old reports
memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
@@ -131,7 +124,7 @@ void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
}
uint32_t RTCPReceiver::RemoteSSRC() const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
return _remoteSSRC;
}
@@ -139,7 +132,7 @@ void RTCPReceiver::SetSsrcs(uint32_t main_ssrc,
const std::set<uint32_t>& registered_ssrcs) {
uint32_t old_ssrc = 0;
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
old_ssrc = main_ssrc_;
main_ssrc_ = main_ssrc;
registered_ssrcs_ = registered_ssrcs;
@@ -156,7 +149,7 @@ int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
int64_t* avgRTT,
int64_t* minRTT,
int64_t* maxRTT) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
RTCPReportBlockInformation* reportBlock =
GetReportBlockInformation(remoteSSRC, main_ssrc_);
@@ -180,13 +173,13 @@ int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
}
void RTCPReceiver::SetRtcpXrRrtrStatus(bool enable) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
xr_rrtr_status_ = enable;
}
bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) {
assert(rtt_ms);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (xr_rr_rtt_ms_ == 0) {
return false;
}
@@ -202,7 +195,7 @@ bool RTCPReceiver::NTP(uint32_t* ReceivedNTPsecs,
uint32_t* RTCPArrivalTimeFrac,
uint32_t* rtcp_timestamp) const
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if(ReceivedNTPsecs)
{
*ReceivedNTPsecs = _remoteSenderInfo.NTPseconds; // NTP from incoming SendReport
@@ -228,7 +221,7 @@ bool RTCPReceiver::NTP(uint32_t* ReceivedNTPsecs,
bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
RtcpReceiveTimeInfo* info) const {
assert(info);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedXRNTPsecs == 0 && _lastReceivedXRNTPfrac == 0) {
return false;
}
@@ -251,7 +244,7 @@ bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
assert(senderInfo);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedSRNTPsecs == 0) {
return -1;
}
@@ -264,7 +257,7 @@ int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
int32_t RTCPReceiver::StatisticsReceived(
std::vector<RTCPReportBlock>* receiveBlocks) const {
assert(receiveBlocks);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
ReportBlockMap::const_iterator it = _receivedReportBlockMap.begin();
for (; it != _receivedReportBlockMap.end(); ++it) {
const ReportBlockInfoMap* info_map = &(it->second);
@@ -280,7 +273,7 @@ int32_t
RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
RTCPUtility::RTCPParserV2* rtcpParser)
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
_lastReceived = _clock->TimeInMilliseconds();
@@ -590,7 +583,7 @@ RTCPReportBlockInformation* RTCPReceiver::GetReportBlockInformation(
RTCPCnameInformation*
RTCPReceiver::CreateCnameInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPCnameInformation*>::iterator it =
_receivedCnameMap.find(remoteSSRC);
@@ -606,7 +599,7 @@ RTCPReceiver::CreateCnameInformation(uint32_t remoteSSRC) {
RTCPCnameInformation*
RTCPReceiver::GetCnameInformation(uint32_t remoteSSRC) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPCnameInformation*>::const_iterator it =
_receivedCnameMap.find(remoteSSRC);
@@ -619,7 +612,7 @@ RTCPReceiver::GetCnameInformation(uint32_t remoteSSRC) const {
RTCPReceiveInformation*
RTCPReceiver::CreateReceiveInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator it =
_receivedInfoMap.find(remoteSSRC);
@@ -634,7 +627,7 @@ RTCPReceiver::CreateReceiveInformation(uint32_t remoteSSRC) {
RTCPReceiveInformation*
RTCPReceiver::GetReceiveInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator it =
_receivedInfoMap.find(remoteSSRC);
@@ -651,7 +644,7 @@ void RTCPReceiver::UpdateReceiveInformation(
}
bool RTCPReceiver::RtcpRrTimeout(int64_t rtcp_interval_ms) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedRrMs == 0)
return false;
@@ -665,7 +658,7 @@ bool RTCPReceiver::RtcpRrTimeout(int64_t rtcp_interval_ms) {
}
bool RTCPReceiver::RtcpRrSequenceNumberTimeout(int64_t rtcp_interval_ms) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastIncreasedSequenceNumberMs == 0)
return false;
@@ -680,7 +673,7 @@ bool RTCPReceiver::RtcpRrSequenceNumberTimeout(int64_t rtcp_interval_ms) {
}
bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
bool updateBoundingSet = false;
int64_t timeNow = _clock->TimeInMilliseconds();
@@ -724,7 +717,7 @@ bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
}
int32_t RTCPReceiver::BoundingSet(bool* tmmbrOwner, TMMBRSet* boundingSetRec) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator receiveInfoIt =
_receivedInfoMap.find(_remoteSSRC);
@@ -773,7 +766,7 @@ void RTCPReceiver::HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser) {
cnameInfo->name[RTCP_CNAME_SIZE - 1] = 0;
strncpy(cnameInfo->name, rtcpPacket.CName.CName, RTCP_CNAME_SIZE - 1);
{
- CriticalSectionScoped lock(_criticalSectionFeedbacks);
+ rtc::CritScope lock(&_criticalSectionFeedbacks);
if (stats_callback_ != NULL) {
stats_callback_->CNameChanged(rtcpPacket.CName.CName,
rtcpPacket.CName.SenderSSRC);
@@ -1283,12 +1276,12 @@ int32_t RTCPReceiver::UpdateTMMBR() {
void RTCPReceiver::RegisterRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
stats_callback_ = callback;
}
RtcpStatisticsCallback* RTCPReceiver::GetRtcpStatisticsCallback() {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
return stats_callback_;
}
@@ -1305,7 +1298,7 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
std::set<uint32_t> registered_ssrcs;
{
// We don't want to hold this critsect when triggering the callbacks below.
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
local_ssrc = main_ssrc_;
registered_ssrcs = registered_ssrcs_;
}
@@ -1378,7 +1371,7 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (!receiver_only_) {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
if (stats_callback_) {
for (ReportBlockList::const_iterator it =
rtcpPacketInformation.report_blocks.begin();
@@ -1400,7 +1393,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
char cName[RTCP_CNAME_SIZE]) const {
assert(cName);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
RTCPCnameInformation* cnameInfo = GetCnameInformation(remoteSSRC);
if (cnameInfo == NULL) {
return -1;
@@ -1414,7 +1407,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
int32_t RTCPReceiver::TMMBRReceived(uint32_t size,
uint32_t accNumCandidates,
TMMBRSet* candidateSet) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::const_iterator
receiveInfoIt = _receivedInfoMap.begin();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index 475ab1e26f7..28c28cb69bb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -15,6 +15,7 @@
#include <set>
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h"
@@ -267,12 +268,12 @@ protected:
int64_t _lastReceived;
ModuleRtpRtcpImpl& _rtpRtcp;
- CriticalSectionWrapper* _criticalSectionFeedbacks;
+ rtc::CriticalSection _criticalSectionFeedbacks;
RtcpBandwidthObserver* const _cbRtcpBandwidthObserver;
RtcpIntraFrameObserver* const _cbRtcpIntraFrameObserver;
TransportFeedbackObserver* const _cbTransportFeedbackObserver;
- CriticalSectionWrapper* _criticalSectionRTCPReceiver;
+ rtc::CriticalSection _criticalSectionRTCPReceiver;
uint32_t main_ssrc_ GUARDED_BY(_criticalSectionRTCPReceiver);
uint32_t _remoteSSRC GUARDED_BY(_criticalSectionRTCPReceiver);
std::set<uint32_t> registered_ssrcs_ GUARDED_BY(_criticalSectionRTCPReceiver);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
index a5c0e282828..bfcc1bdfde0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
@@ -42,14 +42,13 @@ RTCPPacketInformation::RTCPPacketInformation()
RTCPPacketInformation::~RTCPPacketInformation()
{
delete [] applicationData;
- delete VoIPMetric;
}
void
RTCPPacketInformation::AddVoIPMetric(const RTCPVoIPMetric* metric)
{
- VoIPMetric = new RTCPVoIPMetric();
- memcpy(VoIPMetric, metric, sizeof(RTCPVoIPMetric));
+ VoIPMetric.reset(new RTCPVoIPMetric());
+ memcpy(VoIPMetric.get(), metric, sizeof(RTCPVoIPMetric));
}
void RTCPPacketInformation::AddApplicationData(const uint8_t* data,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
index a7928419627..40d1220069c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -12,10 +12,10 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
#include <list>
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPReportBlock
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
@@ -87,9 +87,9 @@ public:
uint32_t xr_originator_ssrc;
bool xr_dlrr_item;
- RTCPVoIPMetric* VoIPMetric;
+ std::unique_ptr<RTCPVoIPMetric> VoIPMetric;
- rtc::scoped_ptr<rtcp::TransportFeedback> transport_feedback_;
+ std::unique_ptr<rtcp::TransportFeedback> transport_feedback_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(RTCPPacketInformation);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index 08f109bc297..924d009883d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -8,10 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
-/*
- * This file includes unit tests for the RTCPReceiver.
- */
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -136,7 +134,7 @@ class RtcpReceiverTest : public ::testing::Test {
rtcp_packet_info_.rtp_timestamp = rtcpPacketInformation.rtp_timestamp;
rtcp_packet_info_.xr_dlrr_item = rtcpPacketInformation.xr_dlrr_item;
if (rtcpPacketInformation.VoIPMetric)
- rtcp_packet_info_.AddVoIPMetric(rtcpPacketInformation.VoIPMetric);
+ rtcp_packet_info_.AddVoIPMetric(rtcpPacketInformation.VoIPMetric.get());
rtcp_packet_info_.transport_feedback_.reset(
rtcpPacketInformation.transport_feedback_.release());
return 0;
@@ -149,7 +147,7 @@ class RtcpReceiverTest : public ::testing::Test {
TestTransport* test_transport_;
RTCPHelp::RTCPPacketInformation rtcp_packet_info_;
MockRemoteBitrateObserver remote_bitrate_observer_;
- rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
};
@@ -1244,6 +1242,20 @@ TEST_F(RtcpReceiverTest, ReceivesTransportFeedback) {
EXPECT_TRUE(rtcp_packet_info_.transport_feedback_.get() != nullptr);
}
+TEST_F(RtcpReceiverTest, ReceivesRemb) {
+ const uint32_t kSenderSsrc = 0x123456;
+ const uint32_t kBitrateBps = 500000;
+ rtcp::Remb remb;
+ remb.From(kSenderSsrc);
+ remb.WithBitrateBps(kBitrateBps);
+ rtc::Buffer built_packet = remb.Build();
+
+ EXPECT_EQ(0, InjectRtcpPacket(built_packet.data(), built_packet.size()));
+
+ EXPECT_EQ(kRtcpRemb, rtcp_packet_info_.rtcpPacketTypeFlags & kRtcpRemb);
+ EXPECT_EQ(kBitrateBps, rtcp_packet_info_.receiverEstimatedMaxBitrate);
+}
+
TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) {
const uint32_t kSenderSsrc = 0x10203;
const uint32_t kSourceSsrc = 0x123456;
@@ -1261,7 +1273,7 @@ TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) {
static uint32_t kBitrateBps = 50000;
rtcp::Remb remb;
- remb.From(kSourceSsrc);
+ remb.From(kSenderSsrc);
remb.WithBitrateBps(kBitrateBps);
rtcp::CompoundPacket compound;
compound.Append(&packet);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index 95bfeeea1f6..4a509b001ea 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -13,6 +13,7 @@
#include <string.h> // memcpy
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/call.h"
@@ -638,16 +639,11 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBN(
const RtcpContext& ctx) {
- TMMBRSet* boundingSet = tmmbr_help_.BoundingSetToSend();
- if (boundingSet == nullptr)
- return nullptr;
-
rtcp::Tmmbn* tmmbn = new rtcp::Tmmbn();
tmmbn->From(ssrc_);
- for (uint32_t i = 0; i < boundingSet->lengthOfSet(); i++) {
- if (boundingSet->Tmmbr(i) > 0) {
- tmmbn->WithTmmbr(boundingSet->Ssrc(i), boundingSet->Tmmbr(i),
- boundingSet->PacketOH(i));
+ for (const rtcp::TmmbItem& tmmbr : tmmbn_to_send_) {
+ if (tmmbr.bitrate_bps() > 0) {
+ tmmbn->WithTmmbr(tmmbr);
}
}
@@ -871,11 +867,13 @@ void RTCPSender::PrepareReport(const std::set<RTCPPacketType>& packetTypes,
random_.Rand(minIntervalMs * 1 / 2, minIntervalMs * 3 / 2);
next_time_to_send_rtcp_ = clock_->TimeInMilliseconds() + timeToNext;
- StatisticianMap statisticians =
- receive_statistics_->GetActiveStatisticians();
- RTC_DCHECK(report_blocks_.empty());
- for (auto& it : statisticians) {
- AddReportBlock(feedback_state, it.first, it.second);
+ if (receive_statistics_) {
+ StatisticianMap statisticians =
+ receive_statistics_->GetActiveStatisticians();
+ RTC_DCHECK(report_blocks_.empty());
+ for (auto& it : statisticians) {
+ AddReportBlock(feedback_state, it.first, it.second);
+ }
}
}
}
@@ -972,14 +970,14 @@ bool RTCPSender::RtcpXrReceiverReferenceTime() const {
}
// no callbacks allowed inside this function
-int32_t RTCPSender::SetTMMBN(const TMMBRSet* boundingSet) {
+void RTCPSender::SetTMMBN(const std::vector<rtcp::TmmbItem>* bounding_set) {
rtc::CritScope lock(&critical_section_rtcp_sender_);
-
- if (0 == tmmbr_help_.SetTMMBRBoundingSetToSend(boundingSet)) {
- SetFlag(kRtcpTmmbn, true);
- return 0;
+ if (bounding_set) {
+ tmmbn_to_send_ = *bounding_set;
+ } else {
+ tmmbn_to_send_.clear();
}
- return -1;
+ SetFlag(kRtcpTmmbn, true);
}
void RTCPSender::SetFlag(RTCPPacketType type, bool is_volatile) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index ba6fb700558..02719aa1c50 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -18,6 +18,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/random.h"
#include "webrtc/base/thread_annotations.h"
@@ -134,7 +135,7 @@ class RTCPSender {
void SetMaxPayloadLength(size_t max_payload_length);
- int32_t SetTMMBN(const TMMBRSet* boundingSet);
+ void SetTMMBN(const std::vector<rtcp::TmmbItem>* boundingSet);
int32_t SetApplicationSpecificData(uint8_t subType,
uint32_t name,
@@ -240,6 +241,8 @@ class RTCPSender {
std::vector<uint32_t> remb_ssrcs_ GUARDED_BY(critical_section_rtcp_sender_);
TMMBRHelp tmmbr_help_ GUARDED_BY(critical_section_rtcp_sender_);
+ std::vector<rtcp::TmmbItem> tmmbn_to_send_
+ GUARDED_BY(critical_section_rtcp_sender_);
uint32_t tmmbr_send_ GUARDED_BY(critical_section_rtcp_sender_);
uint32_t packet_oh_send_ GUARDED_BY(critical_section_rtcp_sender_);
size_t max_payload_length_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index dafc2e0be66..a4d6e59c8f8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -8,10 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-/*
- * This file includes unit tests for the RTCPSender.
- */
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -261,9 +258,9 @@ class RtcpSenderTest : public ::testing::Test {
SimulatedClock clock_;
TestTransport test_transport_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
- rtc::scoped_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
- rtc::scoped_ptr<RTCPSender> rtcp_sender_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
+ std::unique_ptr<RTCPSender> rtcp_sender_;
};
TEST_F(RtcpSenderTest, SetRtcpStatus) {
@@ -691,13 +688,14 @@ TEST_F(RtcpSenderTest, TmmbrIncludedInCompoundPacketIfEnabled) {
TEST_F(RtcpSenderTest, SendTmmbn) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kCompound);
- TMMBRSet bounding_set;
- bounding_set.VerifyAndAllocateSet(1);
+ std::vector<rtcp::TmmbItem> bounding_set;
const uint32_t kBitrateKbps = 32768;
const uint32_t kPacketOh = 40;
const uint32_t kSourceSsrc = 12345;
- bounding_set.AddEntry(kBitrateKbps, kPacketOh, kSourceSsrc);
- EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set));
+ const rtcp::TmmbItem tmmbn(kSourceSsrc, kBitrateKbps * 1000, kPacketOh);
+ bounding_set.push_back(tmmbn);
+ rtcp_sender_->SetTMMBN(&bounding_set);
+
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpSr));
EXPECT_EQ(1, parser()->sender_report()->num_packets());
EXPECT_EQ(1, parser()->tmmbn()->num_packets());
@@ -716,8 +714,8 @@ TEST_F(RtcpSenderTest, SendTmmbn) {
// situation where this caused confusion.
TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kCompound);
- TMMBRSet bounding_set;
- EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set));
+ std::vector<rtcp::TmmbItem> bounding_set;
+ rtcp_sender_->SetTMMBN(&bounding_set);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpSr));
EXPECT_EQ(1, parser()->sender_report()->num_packets());
EXPECT_EQ(1, parser()->tmmbn()->num_packets());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
index 9b5a83515f4..c4f688aac44 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -14,12 +14,17 @@
#include <math.h> // ceil
#include <string.h> // memcpy
+#include <limits>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
+namespace {
+constexpr uint64_t kMaxBitrateBps = std::numeric_limits<uint32_t>::max();
+} // namespace
namespace RTCPUtility {
@@ -1440,14 +1445,23 @@ RTCPUtility::RTCPParserV2::ParsePsfbREMBItem()
}
_packet.REMBItem.NumberOfSSRCs = *_ptrRTCPData++;
- const uint8_t brExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ const uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t brMantissa = (_ptrRTCPData[0] & 0x03) << 16;
- brMantissa += (_ptrRTCPData[1] << 8);
- brMantissa += (_ptrRTCPData[2]);
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 16;
+ mantissa += (_ptrRTCPData[1] << 8);
+ mantissa += (_ptrRTCPData[2]);
_ptrRTCPData += 3; // Fwd read data
- _packet.REMBItem.BitRate = (brMantissa << brExp);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled remb bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+ _packet.REMBItem.BitRate = bitrate_bps;
const ptrdiff_t length_ssrcs = _ptrRTCPBlockEnd - _ptrRTCPData;
if (length_ssrcs < 4 * _packet.REMBItem.NumberOfSSRCs)
@@ -1492,18 +1506,28 @@ RTCPUtility::RTCPParserV2::ParseTMMBRItem()
_packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 8;
_packet.TMMBRItem.SSRC += *_ptrRTCPData++;
- uint8_t mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
- mxtbrMantissa += (_ptrRTCPData[1] << 7);
- mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 15;
+ mantissa += (_ptrRTCPData[1] << 7);
+ mantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
uint32_t measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
measuredOH += _ptrRTCPData[3];
_ptrRTCPData += 4; // Fwd read data
- _packet.TMMBRItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled tmmbr bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+
+ _packet.TMMBRItem.MaxTotalMediaBitRate = bitrate_bps / 1000;
_packet.TMMBRItem.MeasuredOverhead = measuredOH;
return true;
@@ -1531,18 +1555,28 @@ RTCPUtility::RTCPParserV2::ParseTMMBNItem()
_packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 8;
_packet.TMMBNItem.SSRC += *_ptrRTCPData++;
- uint8_t mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
- mxtbrMantissa += (_ptrRTCPData[1] << 7);
- mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 15;
+ mantissa += (_ptrRTCPData[1] << 7);
+ mantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
uint32_t measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
measuredOH += _ptrRTCPData[3];
_ptrRTCPData += 4; // Fwd read data
- _packet.TMMBNItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled tmmbn bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+
+ _packet.TMMBNItem.MaxTotalMediaBitRate = bitrate_bps / 1000;
_packet.TMMBNItem.MeasuredOverhead = measuredOH;
return true;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
index 4067a40886f..629de4e99ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
@@ -13,7 +13,8 @@
#include <stddef.h> // size_t, ptrdiff_t
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -468,7 +469,7 @@ class RTCPParserV2 {
RTCPPacketTypes _packetType;
RTCPPacket _packet;
- rtc::scoped_ptr<webrtc::rtcp::RtcpPacket> rtcp_packet_;
+ std::unique_ptr<webrtc::rtcp::RtcpPacket> rtcp_packet_;
};
class RTCPPacketIterator {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
index e32433fe904..88258df8bcd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
@@ -14,6 +14,7 @@
#include <queue>
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
index d29e3d4f212..12c2db564bd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
@@ -72,7 +72,7 @@ void VerifyFua(size_t fua_index,
void TestFua(size_t frame_size,
size_t max_payload_size,
const std::vector<size_t>& expected_sizes) {
- rtc::scoped_ptr<uint8_t[]> frame;
+ std::unique_ptr<uint8_t[]> frame;
frame.reset(new uint8_t[frame_size]);
frame[0] = 0x05; // F=0, NRI=0, Type=5.
for (size_t i = 0; i < frame_size - kNalHeaderSize; ++i) {
@@ -82,11 +82,11 @@ void TestFua(size_t frame_size,
fragmentation.VerifyAndAllocateFragmentationHeader(1);
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = frame_size;
- rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
+ std::unique_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
kRtpVideoH264, max_payload_size, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame.get(), frame_size, &fragmentation);
- rtc::scoped_ptr<uint8_t[]> packet(new uint8_t[max_payload_size]);
+ std::unique_ptr<uint8_t[]> packet(new uint8_t[max_payload_size]);
size_t length = 0;
bool last = false;
size_t offset = kNalHeaderSize;
@@ -156,7 +156,7 @@ TEST(RtpPacketizerH264Test, TestSingleNalu) {
fragmentation.VerifyAndAllocateFragmentationHeader(1);
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = sizeof(frame);
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, sizeof(frame), &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
@@ -185,7 +185,7 @@ TEST(RtpPacketizerH264Test, TestSingleNaluTwoPackets) {
frame[fragmentation.fragmentationOffset[0]] = 0x01;
frame[fragmentation.fragmentationOffset[1]] = 0x01;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -222,7 +222,7 @@ TEST(RtpPacketizerH264Test, TestStapA) {
fragmentation.fragmentationOffset[2] = 4;
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -257,7 +257,7 @@ TEST(RtpPacketizerH264Test, TestTooSmallForStapAHeaders) {
fragmentation.fragmentationOffset[2] = 4;
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -305,7 +305,7 @@ TEST(RtpPacketizerH264Test, TestMixedStapA_FUA) {
frame[nalu_offset + j] = i + j;
}
}
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -394,7 +394,7 @@ class RtpDepacketizerH264Test : public ::testing::Test {
::testing::ElementsAreArray(data, length));
}
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerH264Test, TestSingleNalu) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
index 3bf72e9dd35..e72fe310cfc 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
@@ -12,6 +12,7 @@
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
index 4283a778d00..079d9647545 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
@@ -8,9 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for the VP8 packetizer.
- */
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -421,7 +419,7 @@ class RtpDepacketizerVp8Test : public ::testing::Test {
::testing::ElementsAreArray(data, length));
}
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerVp8Test, BasicHeader) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
index 5bbafe459d2..f9514ad4bdb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
@@ -76,7 +77,7 @@ void ParseAndCheckPacket(const uint8_t* packet,
const RTPVideoHeaderVP9& expected,
size_t expected_hdr_length,
size_t expected_length) {
- rtc::scoped_ptr<RtpDepacketizer> depacketizer(new RtpDepacketizerVp9());
+ std::unique_ptr<RtpDepacketizer> depacketizer(new RtpDepacketizerVp9());
RtpDepacketizer::ParsedPayload parsed;
ASSERT_TRUE(depacketizer->Parse(&parsed, packet, expected_length));
EXPECT_EQ(kRtpVideoVp9, parsed.type.Video.codec);
@@ -127,12 +128,12 @@ class RtpPacketizerVp9Test : public ::testing::Test {
expected_.InitRTPVideoHeaderVP9();
}
- rtc::scoped_ptr<uint8_t[]> packet_;
- rtc::scoped_ptr<uint8_t[]> payload_;
+ std::unique_ptr<uint8_t[]> packet_;
+ std::unique_ptr<uint8_t[]> payload_;
size_t payload_size_;
size_t payload_pos_;
RTPVideoHeaderVP9 expected_;
- rtc::scoped_ptr<RtpPacketizerVp9> packetizer_;
+ std::unique_ptr<RtpPacketizerVp9> packetizer_;
void Init(size_t payload_size, size_t packet_size) {
payload_.reset(new uint8_t[payload_size]);
@@ -469,7 +470,7 @@ class RtpDepacketizerVp9Test : public ::testing::Test {
}
RTPVideoHeaderVP9 expected_;
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerVp9Test, ParseBasicHeader) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
index 8605925785e..2c2a0a13566 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -112,6 +112,14 @@ int32_t RtpHeaderExtensionMap::GetType(const uint8_t id,
return 0;
}
+RTPExtensionType RtpHeaderExtensionMap::GetType(uint8_t id) const {
+ auto it = extensionMap_.find(id);
+ if (it == extensionMap_.end()) {
+ return kInvalidType;
+ }
+ return it->second->type;
+}
+
int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
uint8_t* id) const {
assert(id);
@@ -129,6 +137,14 @@ int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
return -1;
}
+uint8_t RtpHeaderExtensionMap::GetId(RTPExtensionType type) const {
+ for (auto kv : extensionMap_) {
+ if (kv.second->type == type)
+ return kv.first;
+ }
+ return kInvalidId;
+}
+
size_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
// Get length for each extension block.
size_t length = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
index 342e38a1f2a..beaf989c895 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -70,6 +70,8 @@ struct HeaderExtension {
class RtpHeaderExtensionMap {
public:
+ static constexpr RTPExtensionType kInvalidType = kRtpExtensionNone;
+ static constexpr uint8_t kInvalidId = 0;
RtpHeaderExtensionMap();
~RtpHeaderExtensionMap();
@@ -89,8 +91,12 @@ class RtpHeaderExtensionMap {
bool IsRegistered(RTPExtensionType type) const;
int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
+ // Return kInvalidType if not found.
+ RTPExtensionType GetType(uint8_t id) const;
int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
+ // Return kInvalidId if not found.
+ uint8_t GetId(RTPExtensionType type) const;
//
// Methods below ignore any inactive rtp header extensions.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
index ca37750621c..0b4f893e2c8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
@@ -8,11 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-/*
- * This file includes unit tests for the RtpHeaderExtensionMap.
- */
-
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
new file mode 100644
index 00000000000..a551b15617b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+// Absolute send time in RTP streams.
+//
+// The absolute send time is signaled to the receiver in-band using the
+// general mechanism for RTP header extensions [RFC5285]. The payload
+// of this extension (the transmitted value) is a 24-bit unsigned integer
+// containing the sender's current time in seconds as a fixed point number
+// with 18 bits fractional part.
+//
+// The form of the absolute send time extension block:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=2 | absolute send time |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* AbsoluteSendTime::kName =
+ "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
+bool AbsoluteSendTime::IsSupportedFor(MediaType type) {
+ return true;
+}
+
+bool AbsoluteSendTime::Parse(const uint8_t* data, uint32_t* value) {
+ *value = ByteReader<uint32_t, 3>::ReadBigEndian(data);
+ return true;
+}
+
+bool AbsoluteSendTime::Write(uint8_t* data, int64_t time_ms) {
+ const uint32_t kAbsSendTimeFraction = 18;
+ uint32_t time_24_bits =
+ static_cast<uint32_t>(((time_ms << kAbsSendTimeFraction) + 500) / 1000) &
+ 0x00FFFFFF;
+
+ ByteWriter<uint32_t, 3>::WriteBigEndian(data, time_24_bits);
+ return true;
+}
+
+// An RTP Header Extension for Client-to-Mixer Audio Level Indication
+//
+// https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+//
+// The form of the audio level extension block:
+//
+// 0 1
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=0 |V| level |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+const char* AudioLevel::kName = "urn:ietf:params:rtp-hdrext:ssrc-audio-level";
+bool AudioLevel::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::AUDIO:
+ return true;
+ case MediaType::VIDEO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool AudioLevel::Parse(const uint8_t* data,
+ bool* voice_activity,
+ uint8_t* audio_level) {
+ *voice_activity = (data[0] & 0x80) != 0;
+ *audio_level = data[0] & 0x7F;
+ return true;
+}
+
+bool AudioLevel::Write(uint8_t* data,
+ bool voice_activity,
+ uint8_t audio_level) {
+ RTC_CHECK_LE(audio_level, 0x7f);
+ data[0] = (voice_activity ? 0x80 : 0x00) | audio_level;
+ return true;
+}
+
+// From RFC 5450: Transmission Time Offsets in RTP Streams.
+//
+// The transmission time is signaled to the receiver in-band using the
+// general mechanism for RTP header extensions [RFC5285]. The payload
+// of this extension (the transmitted value) is a 24-bit signed integer.
+// When added to the RTP timestamp of the packet, it represents the
+// "effective" RTP transmission time of the packet, on the RTP
+// timescale.
+//
+// The form of the transmission offset extension block:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=2 | transmission offset |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* TransmissionOffset::kName = "urn:ietf:params:rtp-hdrext:toffset";
+bool TransmissionOffset::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::VIDEO:
+ return true;
+ case MediaType::AUDIO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool TransmissionOffset::Parse(const uint8_t* data, int32_t* value) {
+ *value = ByteReader<int32_t, 3>::ReadBigEndian(data);
+ return true;
+}
+
+bool TransmissionOffset::Write(uint8_t* data, int64_t value) {
+ RTC_CHECK_LE(value, 0x00ffffff);
+ ByteWriter<int32_t, 3>::WriteBigEndian(data, value);
+ return true;
+}
+
+// 0 1 2
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | L=1 |transport wide sequence number |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* TransportSequenceNumber::kName =
+ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions";
+bool TransportSequenceNumber::IsSupportedFor(MediaType type) {
+ return true;
+}
+
+bool TransportSequenceNumber::Parse(const uint8_t* data, uint16_t* value) {
+ *value = ByteReader<uint16_t>::ReadBigEndian(data);
+ return true;
+}
+
+bool TransportSequenceNumber::Write(uint8_t* data, uint16_t value) {
+ ByteWriter<uint16_t>::WriteBigEndian(data, value);
+ return true;
+}
+
+// Coordination of Video Orientation in RTP streams.
+//
+// Coordination of Video Orientation consists in signaling of the current
+// orientation of the image captured on the sender side to the receiver for
+// appropriate rendering and displaying.
+//
+// 0 1
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=0 |0 0 0 0 C F R R|
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* VideoOrientation::kName = "urn:3gpp:video-orientation";
+bool VideoOrientation::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::VIDEO:
+ return true;
+ case MediaType::AUDIO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool VideoOrientation::Parse(const uint8_t* data, VideoRotation* rotation) {
+ *rotation = ConvertCVOByteToVideoRotation(data[0] & 0x03);
+ return true;
+}
+
+bool VideoOrientation::Write(uint8_t* data, VideoRotation rotation) {
+ data[0] = ConvertVideoRotationToCVOByte(rotation);
+ return true;
+}
+
+bool VideoOrientation::Parse(const uint8_t* data, uint8_t* value) {
+ *value = data[0];
+ return true;
+}
+
+bool VideoOrientation::Write(uint8_t* data, uint8_t value) {
+ data[0] = value;
+ return true;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
new file mode 100644
index 00000000000..cdbf806170d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/call.h"
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class AbsoluteSendTime {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionAbsoluteSendTime;
+ static constexpr uint8_t kValueSizeBytes = 3;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, uint32_t* time_ms);
+ static bool Write(uint8_t* data, int64_t time_ms);
+};
+
+class AudioLevel {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionAudioLevel;
+ static constexpr uint8_t kValueSizeBytes = 1;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data,
+ bool* voice_activity,
+ uint8_t* audio_level);
+ static bool Write(uint8_t* data, bool voice_activity, uint8_t audio_level);
+};
+
+class TransmissionOffset {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionTransmissionTimeOffset;
+ static constexpr uint8_t kValueSizeBytes = 3;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, int32_t* time_ms);
+ static bool Write(uint8_t* data, int64_t time_ms);
+};
+
+class TransportSequenceNumber {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionTransportSequenceNumber;
+ static constexpr uint8_t kValueSizeBytes = 2;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, uint16_t* value);
+ static bool Write(uint8_t* data, uint16_t value);
+};
+
+class VideoOrientation {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionVideoRotation;
+ static constexpr uint8_t kValueSizeBytes = 1;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, VideoRotation* value);
+ static bool Write(uint8_t* data, VideoRotation value);
+ static bool Parse(const uint8_t* data, uint8_t* value);
+ static bool Write(uint8_t* data, uint8_t value);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
index d4cbe544cc6..2cec8a3e0f6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -9,10 +9,9 @@
*/
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -30,7 +29,7 @@ class RtpHeaderParserImpl : public RtpHeaderParser {
bool DeregisterRtpHeaderExtension(RTPExtensionType type) override;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_;
+ rtc::CriticalSection critical_section_;
RtpHeaderExtensionMap rtp_header_extension_map_ GUARDED_BY(critical_section_);
};
@@ -38,8 +37,7 @@ RtpHeaderParser* RtpHeaderParser::Create() {
return new RtpHeaderParserImpl;
}
-RtpHeaderParserImpl::RtpHeaderParserImpl()
- : critical_section_(CriticalSectionWrapper::CreateCriticalSection()) {}
+RtpHeaderParserImpl::RtpHeaderParserImpl() {}
bool RtpHeaderParser::IsRtcp(const uint8_t* packet, size_t length) {
RtpUtility::RtpHeaderParser rtp_parser(packet, length);
@@ -54,7 +52,7 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
RtpHeaderExtensionMap map;
{
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
rtp_header_extension_map_.GetCopy(&map);
}
@@ -67,12 +65,12 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RTPExtensionType type,
uint8_t id) {
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
return rtp_header_extension_map_.Register(type, id) == 0;
}
bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RTPExtensionType type) {
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
return rtp_header_extension_map_.Deregister(type) == 0;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
new file mode 100644
index 00000000000..f6634867f68
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -0,0 +1,509 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+
+#include <cstring>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/random.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtp {
+namespace {
+constexpr size_t kFixedHeaderSize = 12;
+constexpr uint8_t kRtpVersion = 2;
+constexpr uint16_t kOneByteExtensionId = 0xBEDE;
+constexpr size_t kOneByteHeaderSize = 1;
+constexpr size_t kDefaultPacketSize = 1500;
+} // namespace
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P|X| CC |M| PT | sequence number |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | synchronization source (SSRC) identifier |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | Contributing source (CSRC) identifiers |
+// | .... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// |One-byte eXtensions id = 0xbede| length in 32bits |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Extensions |
+// | .... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | Payload |
+// | .... : padding... |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | padding | Padding size |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+Packet::Packet(const ExtensionManager* extensions)
+ : extensions_(extensions), buffer_(kDefaultPacketSize) {
+ Clear();
+}
+
+Packet::Packet(const ExtensionManager* extensions, size_t capacity)
+ : extensions_(extensions), buffer_(capacity) {
+ RTC_DCHECK_GE(capacity, kFixedHeaderSize);
+ Clear();
+}
+
+Packet::~Packet() {}
+
+void Packet::IdentifyExtensions(const ExtensionManager* extensions) {
+ RTC_DCHECK(extensions);
+ extensions_ = extensions;
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ uint8_t id = data()[extension_entries_[i].offset - 1] >> 4;
+ extension_entries_[i].type = extensions_->GetType(id);
+ }
+}
+
+bool Packet::Parse(const uint8_t* buffer, size_t buffer_size) {
+ if (!ParseBuffer(buffer, buffer_size)) {
+ Clear();
+ return false;
+ }
+ RTC_DCHECK_EQ(size(), buffer_size);
+ buffer_.SetData(buffer, buffer_size);
+ return true;
+}
+
+bool Packet::Parse(rtc::Buffer buffer) {
+ if (!ParseBuffer(buffer.data(), buffer.size())) {
+ Clear();
+ return false;
+ }
+ RTC_DCHECK_EQ(size(), buffer.size());
+ buffer_ = std::move(buffer);
+ return true;
+}
+
+bool Packet::Marker() const {
+ RTC_DCHECK_EQ(marker_, (data()[1] & 0x80) != 0);
+ return marker_;
+}
+
+uint8_t Packet::PayloadType() const {
+ RTC_DCHECK_EQ(payload_type_, data()[1] & 0x7f);
+ return payload_type_;
+}
+
+uint16_t Packet::SequenceNumber() const {
+ RTC_DCHECK_EQ(sequence_number_,
+ ByteReader<uint16_t>::ReadBigEndian(data() + 2));
+ return sequence_number_;
+}
+
+uint32_t Packet::Timestamp() const {
+ RTC_DCHECK_EQ(timestamp_, ByteReader<uint32_t>::ReadBigEndian(data() + 4));
+ return timestamp_;
+}
+
+uint32_t Packet::Ssrc() const {
+ RTC_DCHECK_EQ(ssrc_, ByteReader<uint32_t>::ReadBigEndian(data() + 8));
+ return ssrc_;
+}
+
+std::vector<uint32_t> Packet::Csrcs() const {
+ size_t num_csrc = data()[0] & 0x0F;
+ RTC_DCHECK_GE(capacity(), kFixedHeaderSize + num_csrc * 4);
+ std::vector<uint32_t> csrcs(num_csrc);
+ for (size_t i = 0; i < num_csrc; ++i) {
+ csrcs[i] =
+ ByteReader<uint32_t>::ReadBigEndian(&data()[kFixedHeaderSize + i * 4]);
+ }
+ return csrcs;
+}
+
+void Packet::GetHeader(RTPHeader* header) const {
+ header->markerBit = Marker();
+ header->payloadType = PayloadType();
+ header->sequenceNumber = SequenceNumber();
+ header->timestamp = Timestamp();
+ header->ssrc = Ssrc();
+ std::vector<uint32_t> csrcs = Csrcs();
+ header->numCSRCs = csrcs.size();
+ for (size_t i = 0; i < csrcs.size(); ++i) {
+ header->arrOfCSRCs[i] = csrcs[i];
+ }
+ header->paddingLength = padding_size();
+ header->headerLength = headers_size();
+ header->payload_type_frequency = 0;
+ header->extension.hasTransmissionTimeOffset =
+ GetExtension<TransmissionOffset>(
+ &header->extension.transmissionTimeOffset);
+ header->extension.hasAbsoluteSendTime =
+ GetExtension<AbsoluteSendTime>(&header->extension.absoluteSendTime);
+ header->extension.hasTransportSequenceNumber =
+ GetExtension<TransportSequenceNumber>(
+ &header->extension.transportSequenceNumber);
+ header->extension.hasAudioLevel = GetExtension<AudioLevel>(
+ &header->extension.voiceActivity, &header->extension.audioLevel);
+ header->extension.hasVideoRotation =
+ GetExtension<VideoOrientation>(&header->extension.videoRotation);
+}
+
+size_t Packet::headers_size() const {
+ return payload_offset_;
+}
+
+size_t Packet::payload_size() const {
+ return payload_size_;
+}
+
+size_t Packet::padding_size() const {
+ return padding_size_;
+}
+
+const uint8_t* Packet::payload() const {
+ return data() + payload_offset_;
+}
+
+size_t Packet::capacity() const {
+ return buffer_.size();
+}
+
+size_t Packet::size() const {
+ return payload_offset_ + payload_size_ + padding_size_;
+}
+
+const uint8_t* Packet::data() const {
+ return buffer_.data();
+}
+
+size_t Packet::FreeCapacity() const {
+ return capacity() - size();
+}
+
+size_t Packet::MaxPayloadSize() const {
+ return capacity() - payload_offset_;
+}
+
+void Packet::CopyHeader(const Packet& packet) {
+ RTC_DCHECK_GE(capacity(), packet.headers_size());
+
+ marker_ = packet.marker_;
+ payload_type_ = packet.payload_type_;
+ sequence_number_ = packet.sequence_number_;
+ timestamp_ = packet.timestamp_;
+ ssrc_ = packet.ssrc_;
+ payload_offset_ = packet.payload_offset_;
+ num_extensions_ = packet.num_extensions_;
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ extension_entries_[i] = packet.extension_entries_[i];
+ }
+ extensions_size_ = packet.extensions_size_;
+ buffer_.SetData(packet.data(), packet.headers_size());
+ // Reset payload and padding.
+ payload_size_ = 0;
+ padding_size_ = 0;
+}
+
+void Packet::SetMarker(bool marker_bit) {
+ marker_ = marker_bit;
+ if (marker_) {
+ WriteAt(1, data()[1] | 0x80);
+ } else {
+ WriteAt(1, data()[1] & 0x7F);
+ }
+}
+
+void Packet::SetPayloadType(uint8_t payload_type) {
+ RTC_DCHECK_LE(payload_type, 0x7Fu);
+ payload_type_ = payload_type;
+ WriteAt(1, (data()[1] & 0x80) | payload_type);
+}
+
+void Packet::SetSequenceNumber(uint16_t seq_no) {
+ sequence_number_ = seq_no;
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(2), seq_no);
+}
+
+void Packet::SetTimestamp(uint32_t timestamp) {
+ timestamp_ = timestamp;
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(4), timestamp);
+}
+
+void Packet::SetSsrc(uint32_t ssrc) {
+ ssrc_ = ssrc;
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(8), ssrc);
+}
+
+void Packet::SetCsrcs(const std::vector<uint32_t>& csrcs) {
+ RTC_DCHECK_EQ(num_extensions_, 0u);
+ RTC_DCHECK_EQ(payload_size_, 0u);
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ RTC_DCHECK_LE(csrcs.size(), 0x0fu);
+ RTC_DCHECK_LE(kFixedHeaderSize + 4 * csrcs.size(), capacity());
+ payload_offset_ = kFixedHeaderSize + 4 * csrcs.size();
+ WriteAt(0, (data()[0] & 0xF0) | csrcs.size());
+ size_t offset = kFixedHeaderSize;
+ for (uint32_t csrc : csrcs) {
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(offset), csrc);
+ offset += 4;
+ }
+}
+
+uint8_t* Packet::AllocatePayload(size_t size_bytes) {
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ if (payload_offset_ + size_bytes > capacity()) {
+ LOG(LS_WARNING) << "Cannot set payload, not enough space in buffer.";
+ return nullptr;
+ }
+ payload_size_ = size_bytes;
+ return WriteAt(payload_offset_);
+}
+
+void Packet::SetPayloadSize(size_t size_bytes) {
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ RTC_DCHECK_LE(size_bytes, payload_size_);
+ payload_size_ = size_bytes;
+}
+
+bool Packet::SetPadding(uint8_t size_bytes, Random* random) {
+ RTC_DCHECK(random);
+ if (payload_offset_ + payload_size_ + size_bytes > capacity()) {
+ LOG(LS_WARNING) << "Cannot set padding size " << size_bytes << ", only "
+ << (capacity() - payload_offset_ - payload_size_)
+ << " bytes left in buffer.";
+ return false;
+ }
+ padding_size_ = size_bytes;
+ if (padding_size_ > 0) {
+ size_t padding_offset = payload_offset_ + payload_size_;
+ size_t padding_end = padding_offset + padding_size_;
+ for (size_t offset = padding_offset; offset < padding_end - 1; ++offset) {
+ WriteAt(offset, random->Rand<uint8_t>());
+ }
+ WriteAt(padding_end - 1, padding_size_);
+ WriteAt(0, data()[0] | 0x20); // Set padding bit.
+ } else {
+ WriteAt(0, data()[0] & ~0x20); // Clear padding bit.
+ }
+ return true;
+}
+
+void Packet::Clear() {
+ marker_ = false;
+ payload_type_ = 0;
+ sequence_number_ = 0;
+ timestamp_ = 0;
+ ssrc_ = 0;
+ payload_offset_ = kFixedHeaderSize;
+ payload_size_ = 0;
+ padding_size_ = 0;
+ num_extensions_ = 0;
+ extensions_size_ = 0;
+
+ memset(WriteAt(0), 0, kFixedHeaderSize);
+ WriteAt(0, kRtpVersion << 6);
+}
+
+bool Packet::ParseBuffer(const uint8_t* buffer, size_t size) {
+ if (size < kFixedHeaderSize) {
+ return false;
+ }
+ const uint8_t version = buffer[0] >> 6;
+ if (version != kRtpVersion) {
+ return false;
+ }
+ const bool has_padding = (buffer[0] & 0x20) != 0;
+ const bool has_extension = (buffer[0] & 0x10) != 0;
+ const uint8_t number_of_crcs = buffer[0] & 0x0f;
+ marker_ = (buffer[1] & 0x80) != 0;
+ payload_type_ = buffer[1] & 0x7f;
+
+ sequence_number_ = ByteReader<uint16_t>::ReadBigEndian(&buffer[2]);
+ timestamp_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[4]);
+ ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[8]);
+ if (size < kFixedHeaderSize + number_of_crcs * 4) {
+ return false;
+ }
+ payload_offset_ = kFixedHeaderSize + number_of_crcs * 4;
+
+ if (has_padding) {
+ padding_size_ = buffer[size - 1];
+ if (padding_size_ == 0) {
+ LOG(LS_WARNING) << "Padding was set, but padding size is zero";
+ return false;
+ }
+ } else {
+ padding_size_ = 0;
+ }
+
+ num_extensions_ = 0;
+ extensions_size_ = 0;
+ if (has_extension) {
+ /* RTP header extension, RFC 3550.
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | defined by profile | length |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | header extension |
+ | .... |
+ */
+ size_t extension_offset = payload_offset_ + 4;
+ if (extension_offset > size) {
+ return false;
+ }
+ uint16_t profile =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[payload_offset_]);
+ size_t extensions_capacity =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[payload_offset_ + 2]);
+ extensions_capacity *= 4;
+ if (extension_offset + extensions_capacity > size) {
+ return false;
+ }
+ if (profile != kOneByteExtensionId) {
+ LOG(LS_WARNING) << "Unsupported rtp extension " << profile;
+ } else {
+ constexpr uint8_t kPaddingId = 0;
+ constexpr uint8_t kReservedId = 15;
+ while (extensions_size_ + kOneByteHeaderSize < extensions_capacity) {
+ uint8_t id = buffer[extension_offset + extensions_size_] >> 4;
+ if (id == kReservedId) {
+ break;
+ } else if (id == kPaddingId) {
+ extensions_size_++;
+ continue;
+ }
+ uint8_t length =
+ 1 + (buffer[extension_offset + extensions_size_] & 0xf);
+ extensions_size_ += kOneByteHeaderSize;
+ if (num_extensions_ >= kMaxExtensionHeaders) {
+ LOG(LS_WARNING) << "Too many extensions.";
+ return false;
+ }
+ extension_entries_[num_extensions_].type =
+ extensions_ ? extensions_->GetType(id)
+ : ExtensionManager::kInvalidType;
+ extension_entries_[num_extensions_].length = length;
+ extension_entries_[num_extensions_].offset =
+ extension_offset + extensions_size_;
+ num_extensions_++;
+ extensions_size_ += length;
+ }
+ }
+ payload_offset_ = extension_offset + extensions_capacity;
+ }
+
+ if (payload_offset_ + padding_size_ > size) {
+ return false;
+ }
+ payload_size_ = size - payload_offset_ - padding_size_;
+ return true;
+}
+
+bool Packet::FindExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) const {
+ RTC_DCHECK(offset);
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ if (extension_entries_[i].type == type) {
+ RTC_CHECK_EQ(length, extension_entries_[i].length)
+ << "Length mismatch for extension '" << type << "'"
+ << "should be " << length << ", received "
+ << extension_entries_[i].length;
+ *offset = extension_entries_[i].offset;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool Packet::AllocateExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) {
+ if (!extensions_) {
+ return false;
+ }
+ if (FindExtension(type, length, offset)) {
+ return true;
+ }
+
+ // Can't add new extension after payload/padding was set.
+ if (payload_size_ > 0) {
+ return false;
+ }
+ if (padding_size_ > 0) {
+ return false;
+ }
+
+ uint8_t extension_id = extensions_->GetId(type);
+ if (extension_id == ExtensionManager::kInvalidId) {
+ return false;
+ }
+ RTC_DCHECK_GT(length, 0u);
+ RTC_DCHECK_LE(length, 16u);
+
+ size_t num_csrc = data()[0] & 0x0F;
+ size_t extensions_offset = kFixedHeaderSize + (num_csrc * 4) + 4;
+ if (extensions_offset + extensions_size_ + kOneByteHeaderSize + length >
+ capacity()) {
+ LOG(LS_WARNING) << "Extension cannot be registered: "
+ "Not enough space left in buffer.";
+ return false;
+ }
+
+ uint16_t new_extensions_size =
+ extensions_size_ + kOneByteHeaderSize + length;
+ uint16_t extensions_words =
+ (new_extensions_size + 3) / 4; // Wrap up to 32bit.
+
+ // All checks passed, write down the extension.
+ if (num_extensions_ == 0) {
+ RTC_DCHECK_EQ(payload_offset_, kFixedHeaderSize + (num_csrc * 4));
+ RTC_DCHECK_EQ(extensions_size_, 0);
+ WriteAt(0, data()[0] | 0x10); // Set extension bit.
+ // Profile specific ID always set to OneByteExtensionHeader.
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(extensions_offset - 4),
+ kOneByteExtensionId);
+ }
+
+ WriteAt(extensions_offset + extensions_size_,
+ (extension_id << 4) | (length - 1));
+ RTC_DCHECK(num_extensions_ < kMaxExtensionHeaders);
+ extension_entries_[num_extensions_].type = type;
+ extension_entries_[num_extensions_].length = length;
+ *offset = extensions_offset + kOneByteHeaderSize + extensions_size_;
+ extension_entries_[num_extensions_].offset = *offset;
+ ++num_extensions_;
+ extensions_size_ = new_extensions_size;
+
+ // Update header length field.
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(extensions_offset - 2),
+ extensions_words);
+ // Fill extension padding place with zeroes.
+ size_t extension_padding_size = 4 * extensions_words - extensions_size_;
+ memset(WriteAt(extensions_offset + extensions_size_), 0,
+ extension_padding_size);
+ payload_offset_ = extensions_offset + 4 * extensions_words;
+ return true;
+}
+
+uint8_t* Packet::WriteAt(size_t offset) {
+ return buffer_.data() + offset;
+}
+
+void Packet::WriteAt(size_t offset, uint8_t byte) {
+ buffer_.data()[offset] = byte;
+}
+
+} // namespace rtp
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h
new file mode 100644
index 00000000000..b2687ca9bab
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+struct RTPHeader;
+class RtpHeaderExtensionMap;
+class Random;
+
+namespace rtp {
+class Packet {
+ public:
+ using ExtensionType = RTPExtensionType;
+ using ExtensionManager = RtpHeaderExtensionMap;
+ static constexpr size_t kMaxExtensionHeaders = 14;
+
+ // Parse and copy given buffer into Packet.
+ bool Parse(const uint8_t* buffer, size_t size);
+
+ // Parse and move given buffer into Packet.
+ bool Parse(rtc::Buffer packet);
+
+ // Maps parsed extensions to their types to allow use of GetExtension.
+ // Used after parsing when |extensions| can't be provided until base rtp
+ // header is parsed.
+ void IdentifyExtensions(const ExtensionManager* extensions);
+
+ // Header.
+ bool Marker() const;
+ uint8_t PayloadType() const;
+ uint16_t SequenceNumber() const;
+ uint32_t Timestamp() const;
+ uint32_t Ssrc() const;
+ std::vector<uint32_t> Csrcs() const;
+
+  // TODO(danilchap): Remove this function when all code is updated to use
+  // RtpPacket directly. It exists just for easier backward compatibility.
+ void GetHeader(RTPHeader* header) const;
+
+ size_t headers_size() const;
+
+ // Payload.
+ size_t payload_size() const;
+ size_t padding_size() const;
+ const uint8_t* payload() const;
+
+ // Buffer.
+ size_t capacity() const;
+ size_t size() const;
+ const uint8_t* data() const;
+ size_t FreeCapacity() const;
+ size_t MaxPayloadSize() const;
+
+ // Reset fields and buffer.
+ void Clear();
+
+ // Header setters.
+ void CopyHeader(const Packet& packet);
+ void SetMarker(bool marker_bit);
+ void SetPayloadType(uint8_t payload_type);
+ void SetSequenceNumber(uint16_t seq_no);
+ void SetTimestamp(uint32_t timestamp);
+ void SetSsrc(uint32_t ssrc);
+
+ // Writes csrc list. Assumes:
+ // a) There is enough room left in buffer.
+ // b) Extension headers, payload or padding data has not already been added.
+ void SetCsrcs(const std::vector<uint32_t>& csrcs);
+
+ // Header extensions.
+ template <typename Extension, typename... Values>
+ bool GetExtension(Values...) const;
+
+ template <typename Extension, typename... Values>
+ bool SetExtension(Values...);
+
+ template <typename Extension>
+ bool ReserveExtension();
+
+ // Reserve size_bytes for payload. Returns nullptr on failure.
+ uint8_t* AllocatePayload(size_t size_bytes);
+ void SetPayloadSize(size_t size_bytes);
+ bool SetPadding(uint8_t size_bytes, Random* random);
+
+ protected:
+  // |extensions| is required for the SetExtension/ReserveExtension functions
+  // during packet creation and is used, if available, in the Parse function.
+ // Adding and getting extensions will fail until |extensions| is
+ // provided via constructor or IdentifyExtensions function.
+ explicit Packet(const ExtensionManager* extensions);
+ Packet(const ExtensionManager* extensions, size_t capacity);
+ virtual ~Packet();
+
+ private:
+ struct ExtensionInfo {
+ ExtensionType type;
+ uint16_t offset;
+ uint8_t length;
+ };
+
+  // Helper for Parse. Fills header fields from the given buffer but does not
+  // touch the packet's own buffer, leaving the packet in an invalid state.
+ bool ParseBuffer(const uint8_t* buffer, size_t size);
+
+  // Find an extension based on the type field of the parameter.
+  // If found, the length field is validated, the offset field is set,
+  // and true is returned;
+  // otherwise the parameter is left unchanged and false is returned.
+ bool FindExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) const;
+
+  // Find or allocate an extension, based on the type field of the parameter.
+  // If found, the length field is checked against what is already registered
+  // and the offset field is set, then true is returned. If allocated, the
+  // length field is used for the allocation and the offset is updated to
+  // indicate the position, then true is returned.
+  // If not found and allocation fails, false is returned and the parameter
+  // remains unchanged.
+ bool AllocateExtension(ExtensionType type, uint8_t length, uint16_t* offset);
+
+ uint8_t* WriteAt(size_t offset);
+ void WriteAt(size_t offset, uint8_t byte);
+
+ const ExtensionManager* extensions_;
+
+ // Header.
+ bool marker_;
+ uint8_t payload_type_;
+ uint8_t padding_size_;
+ uint16_t sequence_number_;
+ uint32_t timestamp_;
+ uint32_t ssrc_;
+ size_t payload_offset_; // Match header size with csrcs and extensions.
+ size_t payload_size_;
+
+ uint8_t num_extensions_ = 0;
+ ExtensionInfo extension_entries_[kMaxExtensionHeaders];
+ uint16_t extensions_size_ = 0; // Unaligned.
+ rtc::Buffer buffer_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Packet);
+};
+
+template <typename Extension, typename... Values>
+bool Packet::GetExtension(Values... values) const {
+ uint16_t offset = 0;
+ if (!FindExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ return Extension::Parse(data() + offset, values...);
+}
+
+template <typename Extension, typename... Values>
+bool Packet::SetExtension(Values... values) {
+ uint16_t offset = 0;
+ if (!AllocateExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ return Extension::Write(WriteAt(offset), values...);
+}
+
+template <typename Extension>
+bool Packet::ReserveExtension() {
+ uint16_t offset = 0;
+ if (!AllocateExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ memset(WriteAt(offset), 0, Extension::kValueSizeBytes);
+ return true;
+}
+} // namespace rtp
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
index 49f9d8530a9..713fba87707 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -21,7 +21,6 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -29,7 +28,6 @@ static const int kMinPacketRequestBytes = 50;
RTPPacketHistory::RTPPacketHistory(Clock* clock)
: clock_(clock),
- critsect_(CriticalSectionWrapper::CreateCriticalSection()),
store_(false),
prev_index_(0) {}
@@ -38,7 +36,7 @@ RTPPacketHistory::~RTPPacketHistory() {
void RTPPacketHistory::SetStorePacketsStatus(bool enable,
uint16_t number_to_store) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (enable) {
if (store_) {
LOG(LS_WARNING) << "Purging packet history in order to re-set status.";
@@ -70,7 +68,7 @@ void RTPPacketHistory::Free() {
}
bool RTPPacketHistory::StorePackets() const {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
return store_;
}
@@ -78,7 +76,7 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
size_t packet_length,
int64_t capture_time_ms,
StorageType type) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return 0;
}
@@ -131,7 +129,7 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
}
bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return false;
}
@@ -150,7 +148,7 @@ bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
}
bool RTPPacketHistory::SetSent(uint16_t sequence_number) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return false;
}
@@ -176,7 +174,7 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
RTC_CHECK_GE(*packet_length, static_cast<size_t>(IP_PACKET_SIZE));
if (!store_)
return false;
@@ -232,7 +230,7 @@ void RTPPacketHistory::GetPacket(int index,
bool RTPPacketHistory::GetBestFittingPacket(uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_)
return false;
int index = FindBestFittingPacket(*packet_length);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index 8e1a732b199..b4d48aa2ced 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -15,6 +15,7 @@
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -23,7 +24,6 @@
namespace webrtc {
class Clock;
-class CriticalSectionWrapper;
static const size_t kMaxHistoryCapacity = 9600;
@@ -71,19 +71,19 @@ class RTPPacketHistory {
uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- void Allocate(size_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- void Free() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+ void Allocate(size_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+ void Free() EXCLUSIVE_LOCKS_REQUIRED(critsect_);
void VerifyAndAllocatePacketLength(size_t packet_length, uint32_t start_index)
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
bool FindSeqNum(uint16_t sequence_number, int32_t* index) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
int FindBestFittingPacket(size_t size) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
private:
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
+ rtc::CriticalSection critsect_;
bool store_ GUARDED_BY(critsect_);
uint32_t prev_index_ GUARDED_BY(critsect_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
index a406d8bc9b4..7580a809235 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -6,8 +6,6 @@
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
- *
- * This file includes unit tests for the RTPPacketHistory.
*/
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h
new file mode 100644
index 00000000000..e2222b9200a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+#include "webrtc/system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+// Class to hold rtp packet with metadata for receiver side.
+class RtpPacketReceived : public rtp::Packet {
+ public:
+ RtpPacketReceived() : Packet(nullptr) {}
+ explicit RtpPacketReceived(const ExtensionManager* extensions)
+ : Packet(extensions) {}
+
+ void GetHeader(RTPHeader* header) const {
+ Packet::GetHeader(header);
+ header->payload_type_frequency = payload_type_frequency();
+ }
+
+  // Time in the local time base, as close as possible to when the packet
+  // arrived on the network.
+ int64_t arrival_time_ms() const { return arrival_time_ms_; }
+ void set_arrival_time_ms(int64_t time) { arrival_time_ms_ = time; }
+
+ // Estimated from Timestamp() using rtcp Sender Reports.
+ NtpTime capture_ntp_time() const { return capture_time_; }
+ void set_capture_ntp_time(NtpTime time) { capture_time_ = time; }
+
+ // Flag if packet arrived via rtx.
+ bool retransmit() const { return retransmit_; }
+ void set_retransmit(bool value) { retransmit_ = value; }
+
+ int payload_type_frequency() const { return payload_type_frequency_; }
+ void set_payload_type_frequency(int value) {
+ payload_type_frequency_ = value;
+ }
+
+ private:
+ NtpTime capture_time_;
+ int64_t arrival_time_ms_ = 0;
+ int payload_type_frequency_ = 0;
+ bool retransmit_ = false;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
new file mode 100644
index 00000000000..ad749ffb61e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+
+namespace webrtc {
+// Class to hold rtp packet with metadata for sender side.
+class RtpPacketToSend : public rtp::Packet {
+ public:
+ explicit RtpPacketToSend(const ExtensionManager* extensions)
+ : Packet(extensions) {}
+ RtpPacketToSend(const ExtensionManager* extensions, size_t capacity)
+ : Packet(extensions, capacity) {}
+
+  // Time in the local time base, as close as possible to frame capture time.
+ int64_t capture_time_ms() const { return capture_time_ms_; }
+ void set_capture_time_ms(int64_t time) { capture_time_ms_ = time; }
+
+ private:
+ int64_t capture_time_ms_ = 0;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
new file mode 100644
index 00000000000..b992d2da909
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
@@ -0,0 +1,252 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
+
+using testing::ElementsAreArray;
+using testing::make_tuple;
+
+namespace webrtc {
+namespace {
+constexpr int8_t kPayloadType = 100;
+constexpr uint32_t kSsrc = 0x12345678;
+constexpr uint16_t kSeqNum = 88;
+constexpr uint32_t kTimestamp = 0x65431278;
+constexpr uint8_t kTransmissionOffsetExtensionId = 1;
+constexpr uint8_t kAudioLevelExtensionId = 9;
+constexpr int32_t kTimeOffset = 0x56ce;
+constexpr bool kVoiceActive = true;
+constexpr uint8_t kAudioLevel = 0x5a;
+constexpr size_t kMaxPaddingSize = 224u;
+constexpr uint8_t kMinimumPacket[] = {
+ 0x80, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78};
+constexpr uint8_t kPacketWithTO[] = {
+ 0x90, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0xbe, 0xde, 0x00, 0x01,
+ 0x12, 0x00, 0x56, 0xce};
+
+constexpr uint8_t kPacketWithTOAndAL[] = {
+ 0x90, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0xbe, 0xde, 0x00, 0x02,
+ 0x12, 0x00, 0x56, 0xce,
+ 0x90, 0x80|kAudioLevel, 0x00, 0x00};
+
+constexpr uint32_t kCsrcs[] = {0x34567890, 0x32435465};
+constexpr uint8_t kPayload[] = {'p', 'a', 'y', 'l', 'o', 'a', 'd'};
+constexpr uint8_t kPacketPaddingSize = 8;
+constexpr uint8_t kPacket[] = {
+ 0xb2, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0x34, 0x56, 0x78, 0x90,
+ 0x32, 0x43, 0x54, 0x65,
+ 0xbe, 0xde, 0x00, 0x01,
+ 0x12, 0x00, 0x56, 0xce,
+ 'p', 'a', 'y', 'l', 'o', 'a', 'd',
+ 'p', 'a', 'd', 'd', 'i', 'n', 'g', kPacketPaddingSize};
+
+} // namespace
+
+TEST(RtpPacketTest, CreateMinimum) {
+ RtpPacketToSend packet(nullptr);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ EXPECT_THAT(kMinimumPacket, ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, CreateWithExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ RtpPacketToSend packet(&extensions);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.SetExtension<TransmissionOffset>(kTimeOffset);
+ EXPECT_THAT(kPacketWithTO, ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, CreateWith2Extensions) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketToSend packet(&extensions);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.SetExtension<TransmissionOffset>(kTimeOffset);
+ packet.SetExtension<AudioLevel>(kVoiceActive, kAudioLevel);
+ EXPECT_THAT(kPacketWithTOAndAL,
+ ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, SetReservedExtensionsAfterPayload) {
+ const size_t kPayloadSize = 4;
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketToSend packet(&extensions);
+
+ EXPECT_TRUE(packet.ReserveExtension<TransmissionOffset>());
+ packet.AllocatePayload(kPayloadSize);
+ // Can't set extension after payload.
+ EXPECT_FALSE(packet.SetExtension<AudioLevel>(kVoiceActive, kAudioLevel));
+ // Unless reserved.
+ EXPECT_TRUE(packet.SetExtension<TransmissionOffset>(kTimeOffset));
+}
+
+TEST(RtpPacketTest, CreatePurePadding) {
+ const size_t kPaddingSize = kMaxPaddingSize - 1;
+ RtpPacketToSend packet(nullptr, 12 + kPaddingSize);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ Random random(0x123456789);
+
+ EXPECT_LT(packet.size(), packet.capacity());
+ EXPECT_FALSE(packet.SetPadding(kPaddingSize + 1, &random));
+ EXPECT_TRUE(packet.SetPadding(kPaddingSize, &random));
+ EXPECT_EQ(packet.size(), packet.capacity());
+}
+
+TEST(RtpPacketTest, CreateUnalignedPadding) {
+ const size_t kPayloadSize = 3; // Make padding start at unaligned address.
+ RtpPacketToSend packet(nullptr, 12 + kPayloadSize + kMaxPaddingSize);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.AllocatePayload(kPayloadSize);
+ Random r(0x123456789);
+
+ EXPECT_LT(packet.size(), packet.capacity());
+ EXPECT_TRUE(packet.SetPadding(kMaxPaddingSize, &r));
+ EXPECT_EQ(packet.size(), packet.capacity());
+}
+
+TEST(RtpPacketTest, ParseMinimum) {
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(kMinimumPacket, sizeof(kMinimumPacket)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_EQ(0u, packet.padding_size());
+ EXPECT_EQ(0u, packet.payload_size());
+}
+
+TEST(RtpPacketTest, ParseBuffer) {
+ rtc::Buffer unparsed(kMinimumPacket);
+ const uint8_t* raw = unparsed.data();
+
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(std::move(unparsed)));
+ EXPECT_EQ(raw, packet.data()); // Expect packet took over the buffer.
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_EQ(0u, packet.padding_size());
+ EXPECT_EQ(0u, packet.payload_size());
+}
+
+TEST(RtpPacketTest, ParseWithExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacketWithTO, sizeof(kPacketWithTO)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ EXPECT_EQ(0u, packet.payload_size());
+ EXPECT_EQ(0u, packet.padding_size());
+}
+
+TEST(RtpPacketTest, ParseWith2Extensions) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacketWithTOAndAL, sizeof(kPacketWithTOAndAL)));
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ bool voice_active;
+ uint8_t audio_level;
+ EXPECT_TRUE(packet.GetExtension<AudioLevel>(&voice_active, &audio_level));
+ EXPECT_EQ(kVoiceActive, voice_active);
+ EXPECT_EQ(kAudioLevel, audio_level);
+}
+
+TEST(RtpPacketTest, ParseWithAllFeatures) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacket, sizeof(kPacket)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_THAT(packet.Csrcs(), ElementsAreArray(kCsrcs));
+ EXPECT_THAT(make_tuple(packet.payload(), packet.payload_size()),
+ ElementsAreArray(kPayload));
+ EXPECT_EQ(kPacketPaddingSize, packet.padding_size());
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+}
+
+TEST(RtpPacketTest, ParseWithExtensionDelayed) {
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(kPacketWithTO, sizeof(kPacketWithTO)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+
+ int32_t time_offset;
+ EXPECT_FALSE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ packet.IdentifyExtensions(&extensions);
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ EXPECT_EQ(0u, packet.payload_size());
+ EXPECT_EQ(0u, packet.padding_size());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
index f3793d0901e..283c2846e1b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -16,8 +16,7 @@
namespace webrtc {
RTPPayloadRegistry::RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- rtp_payload_strategy_(rtp_payload_strategy),
+ : rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
ulpfec_payload_type_(-1),
incoming_payload_type_(-1),
@@ -67,7 +66,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
size_t payload_name_length = strlen(payload_name);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::iterator it =
payload_type_map_.find(payload_type);
@@ -122,7 +121,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
int32_t RTPPayloadRegistry::DeRegisterReceivePayload(
const int8_t payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::iterator it =
payload_type_map_.find(payload_type);
assert(it != payload_type_map_.end());
@@ -176,7 +175,7 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
assert(payload_type);
size_t payload_name_length = strlen(payload_name);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it = payload_type_map_.begin();
@@ -218,12 +217,12 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
}
bool RTPPayloadRegistry::RtxEnabled() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return rtx_;
}
bool RTPPayloadRegistry::IsRtx(const RTPHeader& header) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return IsRtxInternal(header);
}
@@ -231,15 +230,6 @@ bool RTPPayloadRegistry::IsRtxInternal(const RTPHeader& header) const {
return rtx_ && ssrc_rtx_ == header.ssrc;
}
-bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t** restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const {
- return RestoreOriginalPacket(*restored_packet, packet, packet_length,
- original_ssrc, header);
-}
-
bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
const uint8_t* packet,
size_t* packet_length,
@@ -264,7 +254,7 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
original_sequence_number);
ByteWriter<uint32_t>::WriteBigEndian(restored_packet + 8, original_ssrc);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (!rtx_)
return true;
@@ -290,20 +280,20 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
}
void RTPPayloadRegistry::SetRtxSsrc(uint32_t ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
ssrc_rtx_ = ssrc;
rtx_ = true;
}
bool RTPPayloadRegistry::GetRtxSsrc(uint32_t* ssrc) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
*ssrc = ssrc_rtx_;
return rtx_;
}
void RTPPayloadRegistry::SetRtxPayloadType(int payload_type,
int associated_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (payload_type < 0) {
LOG(LS_ERROR) << "Invalid RTX payload type: " << payload_type;
return;
@@ -315,7 +305,7 @@ void RTPPayloadRegistry::SetRtxPayloadType(int payload_type,
}
bool RTPPayloadRegistry::IsRed(const RTPHeader& header) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return red_payload_type_ == header.payloadType;
}
@@ -325,7 +315,7 @@ bool RTPPayloadRegistry::IsEncapsulated(const RTPHeader& header) const {
bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
PayloadUnion* payload) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
@@ -343,13 +333,13 @@ int RTPPayloadRegistry::GetPayloadTypeFrequency(
if (!payload) {
return -1;
}
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return rtp_payload_strategy_->GetPayloadTypeFrequency(*payload);
}
const RtpUtility::Payload* RTPPayloadRegistry::PayloadTypeToPayload(
uint8_t payload_type) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
@@ -363,13 +353,13 @@ const RtpUtility::Payload* RTPPayloadRegistry::PayloadTypeToPayload(
}
void RTPPayloadRegistry::SetIncomingPayloadType(const RTPHeader& header) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (!IsRtxInternal(header))
incoming_payload_type_ = header.payloadType;
}
bool RTPPayloadRegistry::ReportMediaPayloadType(uint8_t media_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (last_received_media_payload_type_ == media_payload_type) {
// Media type unchanged.
return true;
@@ -416,7 +406,8 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(
+ const RtpUtility::Payload& payload) const override {
return payload.typeSpecific.Audio.frequency;
}
};
@@ -466,7 +457,8 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(
+ const RtpUtility::Payload& payload) const override {
return kVideoPayloadTypeFrequency;
}
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index cbded6872d3..5bbe97a32ce 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h"
@@ -58,7 +59,7 @@ class RtpPayloadRegistryTest : public ::testing::Test {
return returned_payload_on_heap;
}
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry_;
testing::NiceMock<MockRTPPayloadStrategy>* mock_payload_strategy_;
};
@@ -296,9 +297,9 @@ void TestRtxPacket(RTPPayloadRegistry* rtp_payload_registry,
uint16_t original_sequence_number = 1234;
uint32_t original_ssrc = 500;
- rtc::scoped_ptr<const uint8_t[]> packet(GenerateRtxPacket(
+ std::unique_ptr<const uint8_t[]> packet(GenerateRtxPacket(
header_length, payload_length, original_sequence_number));
- rtc::scoped_ptr<uint8_t[]> restored_packet(
+ std::unique_ptr<uint8_t[]> restored_packet(
new uint8_t[header_length + payload_length]);
size_t length = original_length;
bool success = rtp_payload_registry->RestoreOriginalPacket(
@@ -312,7 +313,7 @@ void TestRtxPacket(RTPPayloadRegistry* rtp_payload_registry,
EXPECT_EQ(original_length - kRtxHeaderSize, length)
<< "The restored packet should be exactly kRtxHeaderSize smaller.";
- rtc::scoped_ptr<RtpHeaderParser> header_parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> header_parser(RtpHeaderParser::Create());
RTPHeader restored_header;
ASSERT_TRUE(
header_parser->Parse(restored_packet.get(), length, &restored_header));
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index 7a3b6fd8290..38b2830b79c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -16,7 +16,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateAudioStrategy(
@@ -46,26 +45,26 @@ RTPReceiverAudio::RTPReceiverAudio(RtpData* data_callback)
// Outband TelephoneEvent(DTMF) detection
void RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
telephone_event_forward_to_decoder_ = forward_to_decoder;
}
// Is forwarding of outband telephone events turned on/off?
bool RTPReceiverAudio::TelephoneEventForwardToDecoder() const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
return telephone_event_forward_to_decoder_;
}
bool RTPReceiverAudio::TelephoneEventPayloadType(
int8_t payload_type) const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
return telephone_event_payload_type_ == payload_type;
}
bool RTPReceiverAudio::CNGPayloadType(int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
*cng_payload_type_has_changed = false;
// We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
@@ -152,7 +151,7 @@ int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (RtpUtility::StringCompare(payload_name, "telephone-event", 15)) {
telephone_event_payload_type_ = payload_type;
@@ -194,6 +193,10 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->type.Audio.numEnergy);
}
+ if (first_packet_received_()) {
+ LOG(LS_INFO) << "Received first audio RTP packet";
+ }
+
return ParseAudioCodecSpecific(rtp_header,
payload,
payload_length,
@@ -202,7 +205,7 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
}
int RTPReceiverAudio::GetPayloadTypeFrequency() const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (last_received_g722_) {
return 8000;
}
@@ -245,7 +248,7 @@ void RTPReceiverAudio::CheckPayloadChanged(int8_t payload_type,
}
int RTPReceiverAudio::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
assert(num_energy_ <= kRtpCsrcSize);
@@ -287,7 +290,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
bool telephone_event_packet =
TelephoneEventPayloadType(rtp_header->header.payloadType);
if (telephone_event_packet) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
// RFC 4733 2.3
// 0 1 2 3
@@ -332,7 +335,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
}
{
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (!telephone_event_packet) {
last_received_frequency_ = audio_specific.frequency;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
index b68febbb82b..d5d89bae2d4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -13,7 +13,7 @@
#include <set>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
@@ -22,8 +22,6 @@
namespace webrtc {
-class CriticalSectionWrapper;
-
// Handles audio RTP packets. This class is thread-safe.
class RTPReceiverAudio : public RTPReceiverStrategy,
public TelephoneEventHandler {
@@ -33,15 +31,15 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
// The following three methods implement the TelephoneEventHandler interface.
// Forward DTMFs to decoder for playout.
- void SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
+ void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) override;
// Is forwarding of outband telephone events turned on/off?
- bool TelephoneEventForwardToDecoder() const;
+ bool TelephoneEventForwardToDecoder() const override;
// Is TelephoneEvent configured with payload type payload_type
- bool TelephoneEventPayloadType(const int8_t payload_type) const;
+ bool TelephoneEventPayloadType(const int8_t payload_type) const override;
- TelephoneEventHandler* GetTelephoneEventHandler() { return this; }
+ TelephoneEventHandler* GetTelephoneEventHandler() override { return this; }
// Returns true if CNG is configured with payload type payload_type. If so,
// the frequency and cng_payload_type_has_changed are filled in.
@@ -118,6 +116,8 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
+
+ ThreadUnsafeOneTimeEvent first_packet_received_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index 6f2efe783a1..190449b3ddf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -61,8 +61,6 @@ RtpReceiverImpl::RtpReceiverImpl(
rtp_payload_registry_(rtp_payload_registry),
rtp_media_receiver_(rtp_media_receiver),
cb_rtp_feedback_(incoming_messages_callback),
- critical_section_rtp_receiver_(
- CriticalSectionWrapper::CreateCriticalSection()),
last_receive_time_(0),
last_received_payload_length_(0),
ssrc_(0),
@@ -70,8 +68,7 @@ RtpReceiverImpl::RtpReceiverImpl(
current_remote_csrc_(),
last_received_timestamp_(0),
last_received_frame_time_ms_(-1),
- last_received_sequence_number_(0),
- nack_method_(kNackOff) {
+ last_received_sequence_number_(0) {
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
@@ -89,7 +86,7 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
const uint32_t frequency,
const size_t channels,
const uint32_t rate) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
// TODO(phoglund): Try to streamline handling of the RED codec and some other
// cases which makes it necessary to keep track of whether we created a
@@ -111,29 +108,18 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
int32_t RtpReceiverImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
return rtp_payload_registry_->DeRegisterReceivePayload(payload_type);
}
-NACKMethod RtpReceiverImpl::NACK() const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
- return nack_method_;
-}
-
-// Turn negative acknowledgment requests on/off.
-void RtpReceiverImpl::SetNACKStatus(const NACKMethod method) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
- nack_method_ = method;
-}
-
uint32_t RtpReceiverImpl::SSRC() const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
return ssrc_;
}
// Get remote CSRC.
int32_t RtpReceiverImpl::CSRCs(uint32_t array_of_csrcs[kRtpCsrcSize]) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
assert(num_csrcs_ <= kRtpCsrcSize);
@@ -179,7 +165,7 @@ bool RtpReceiverImpl::IncomingRtpPacket(
bool is_first_packet_in_frame = false;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (HaveReceivedFrame()) {
is_first_packet_in_frame =
last_received_sequence_number_ + 1 == rtp_header.sequenceNumber &&
@@ -198,7 +184,7 @@ bool RtpReceiverImpl::IncomingRtpPacket(
}
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
last_receive_time_ = clock_->TimeInMilliseconds();
last_received_payload_length_ = payload_data_length;
@@ -219,7 +205,7 @@ TelephoneEventHandler* RtpReceiverImpl::GetTelephoneEventHandler() {
}
bool RtpReceiverImpl::Timestamp(uint32_t* timestamp) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!HaveReceivedFrame())
return false;
*timestamp = last_received_timestamp_;
@@ -227,7 +213,7 @@ bool RtpReceiverImpl::Timestamp(uint32_t* timestamp) const {
}
bool RtpReceiverImpl::LastReceivedTimeMs(int64_t* receive_time_ms) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!HaveReceivedFrame())
return false;
*receive_time_ms = last_received_frame_time_ms_;
@@ -247,7 +233,7 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
uint32_t rate = 0;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
@@ -318,7 +304,7 @@ int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
int8_t payload_type = rtp_header.payloadType;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
@@ -401,7 +387,7 @@ void RtpReceiverImpl::CheckCSRC(const WebRtcRTPHeader& rtp_header) {
uint8_t old_num_csrcs = 0;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!rtp_media_receiver_->ShouldReportCsrcChanges(
rtp_header.header.payloadType)) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
index 63b65fefd8e..1ae1c9168a6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
@@ -11,11 +11,12 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -46,11 +47,6 @@ class RtpReceiverImpl : public RtpReceiver {
PayloadUnion payload_specific,
bool in_order) override;
- NACKMethod NACK() const override;
-
- // Turn negative acknowledgement requests on/off.
- void SetNACKStatus(const NACKMethod method) override;
-
// Returns the last received timestamp.
bool Timestamp(uint32_t* timestamp) const override;
bool LastReceivedTimeMs(int64_t* receive_time_ms) const override;
@@ -75,11 +71,11 @@ class RtpReceiverImpl : public RtpReceiver {
Clock* clock_;
RTPPayloadRegistry* rtp_payload_registry_;
- rtc::scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
+ std::unique_ptr<RTPReceiverStrategy> rtp_media_receiver_;
RtpFeedback* cb_rtp_feedback_;
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_;
+ rtc::CriticalSection critical_section_rtp_receiver_;
int64_t last_receive_time_;
size_t last_received_payload_length_;
@@ -91,8 +87,6 @@ class RtpReceiverImpl : public RtpReceiver {
uint32_t last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint16_t last_received_sequence_number_;
-
- NACKMethod nack_method_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
index 3797b1bcc20..69d079f9aa3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
@@ -12,25 +12,22 @@
#include <stdlib.h>
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
namespace webrtc {
RTPReceiverStrategy::RTPReceiverStrategy(RtpData* data_callback)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- data_callback_(data_callback) {
+ : data_callback_(data_callback) {
memset(&last_payload_, 0, sizeof(last_payload_));
}
void RTPReceiverStrategy::GetLastMediaSpecificPayload(
PayloadUnion* payload) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
memcpy(payload, &last_payload_, sizeof(*payload));
}
void RTPReceiverStrategy::SetLastMediaSpecificPayload(
const PayloadUnion& payload) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
memcpy(&last_payload_, &payload, sizeof(last_payload_));
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
index f2a60ff855a..663b883295d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
@@ -11,11 +11,10 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -95,7 +94,7 @@ class RTPReceiverStrategy {
// packet.
explicit RTPReceiverStrategy(RtpData* data_callback);
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
PayloadUnion last_payload_;
RtpData* data_callback_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index 406acc23c20..9d76c1a6163 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -13,6 +13,8 @@
#include <assert.h>
#include <string.h>
+#include <memory>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
@@ -21,7 +23,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -70,8 +71,12 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
: -1;
}
+ if (first_packet_received_()) {
+ LOG(LS_INFO) << "Received first video RTP packet";
+ }
+
// We are not allowed to hold a critical section when calling below functions.
- rtc::scoped_ptr<RtpDepacketizer> depacketizer(
+ std::unique_ptr<RtpDepacketizer> depacketizer(
RtpDepacketizer::Create(rtp_header->type.Video.codec));
if (depacketizer.get() == NULL) {
LOG(LS_ERROR) << "Failed to create depacketizer.";
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
index 56f761a2e1c..486eced3641 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
@@ -34,7 +34,7 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
int64_t timestamp,
bool is_first_packet) override;
- TelephoneEventHandler* GetTelephoneEventHandler() { return NULL; }
+ TelephoneEventHandler* GetTelephoneEventHandler() override { return NULL; }
int GetPayloadTypeFrequency() const override;
@@ -54,6 +54,9 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
const PayloadUnion& specific_payload) const override;
void SetPacketOverHead(uint16_t packet_over_head);
+
+ private:
+ OneTimeEvent first_packet_received_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 5875529cfb2..214472f81ae 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -60,7 +60,8 @@ RtpRtcp::Configuration::Configuration()
send_bitrate_observer(nullptr),
send_frame_count_observer(nullptr),
send_side_delay_observer(nullptr),
- event_log(nullptr) {}
+ event_log(nullptr),
+ send_packet_observer(nullptr) {}
RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
if (configuration.clock) {
@@ -85,7 +86,8 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.send_bitrate_observer,
configuration.send_frame_count_observer,
configuration.send_side_delay_observer,
- configuration.event_log),
+ configuration.event_log,
+ configuration.send_packet_observer),
rtcp_sender_(configuration.audio,
configuration.clock,
configuration.receive_statistics,
@@ -105,14 +107,13 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
last_process_time_(configuration.clock->TimeInMilliseconds()),
last_bitrate_process_time_(configuration.clock->TimeInMilliseconds()),
last_rtt_process_time_(configuration.clock->TimeInMilliseconds()),
- packet_overhead_(28), // IPV4 UDP.
+ packet_overhead_(28), // IPV4 UDP.
nack_last_time_sent_full_(0),
nack_last_time_sent_full_prev_(0),
nack_last_seq_number_sent_(0),
key_frame_req_method_(kKeyFrameReqPliRtcp),
remote_bitrate_(configuration.remote_bitrate_estimator),
rtt_stats_(configuration.rtt_stats),
- critical_section_rtt_(CriticalSectionWrapper::CreateCriticalSection()),
rtt_ms_(0) {
// Make sure that RTCP objects are aware of our SSRC.
uint32_t SSRC = rtp_sender_.SSRC();
@@ -301,30 +302,21 @@ void ModuleRtpRtcpImpl::SetSequenceNumber(const uint16_t seq_num) {
rtp_sender_.SetSequenceNumber(seq_num);
}
-bool ModuleRtpRtcpImpl::SetRtpStateForSsrc(uint32_t ssrc,
- const RtpState& rtp_state) {
- if (rtp_sender_.SSRC() == ssrc) {
- SetStartTimestamp(rtp_state.start_timestamp);
- rtp_sender_.SetRtpState(rtp_state);
- return true;
- }
- if (rtp_sender_.RtxSsrc() == ssrc) {
- rtp_sender_.SetRtxRtpState(rtp_state);
- return true;
- }
- return false;
+void ModuleRtpRtcpImpl::SetRtpState(const RtpState& rtp_state) {
+ SetStartTimestamp(rtp_state.start_timestamp);
+ rtp_sender_.SetRtpState(rtp_state);
}
-bool ModuleRtpRtcpImpl::GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) {
- if (rtp_sender_.SSRC() == ssrc) {
- *rtp_state = rtp_sender_.GetRtpState();
- return true;
- }
- if (rtp_sender_.RtxSsrc() == ssrc) {
- *rtp_state = rtp_sender_.GetRtxRtpState();
- return true;
- }
- return false;
+void ModuleRtpRtcpImpl::SetRtxState(const RtpState& rtp_state) {
+ rtp_sender_.SetRtxRtpState(rtp_state);
+}
+
+RtpState ModuleRtpRtcpImpl::GetRtpState() const {
+ return rtp_sender_.GetRtpState();
+}
+
+RtpState ModuleRtpRtcpImpl::GetRtxState() const {
+ return rtp_sender_.GetRtxRtpState();
}
uint32_t ModuleRtpRtcpImpl::SSRC() const {
@@ -688,8 +680,9 @@ void ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
rtcp_sender_.SetTMMBRStatus(enable);
}
-int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
- return rtcp_sender_.SetTMMBN(bounding_set);
+void ModuleRtpRtcpImpl::SetTMMBN(
+ const std::vector<rtcp::TmmbItem>* bounding_set) {
+ rtcp_sender_.SetTMMBN(bounding_set);
}
// Returns the currently configured retransmission mode.
@@ -982,12 +975,12 @@ void ModuleRtpRtcpImpl::SetRtcpReceiverSsrcs(uint32_t main_ssrc) {
}
void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
- CriticalSectionScoped cs(critical_section_rtt_.get());
+ rtc::CritScope cs(&critical_section_rtt_);
rtt_ms_ = rtt_ms;
}
int64_t ModuleRtpRtcpImpl::rtt_ms() const {
- CriticalSectionScoped cs(critical_section_rtt_.get());
+ rtc::CritScope cs(&critical_section_rtt_);
return rtt_ms_;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 76faca0f7eb..7bbb06e5289 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -16,8 +16,8 @@
#include <utility>
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/gtest_prod_util.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/packet_loss_stats.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
@@ -75,8 +75,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Set SequenceNumber, default is a random number.
void SetSequenceNumber(uint16_t seq) override;
- bool SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) override;
- bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) override;
+ void SetRtpState(const RtpState& rtp_state) override;
+ void SetRtxState(const RtpState& rtp_state) override;
+ RtpState GetRtpState() const override;
+ RtpState GetRtxState() const override;
uint32_t SSRC() const override;
@@ -200,7 +202,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
void SetTMMBRStatus(bool enable) override;
- int32_t SetTMMBN(const TMMBRSet* bounding_set);
+ void SetTMMBN(const std::vector<rtcp::TmmbItem>* bounding_set);
uint16_t MaxPayloadLength() const override;
@@ -362,7 +364,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
PacketLossStats receive_loss_stats_;
// The processed RTT from RtcpRttStats.
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtt_;
+ rtc::CriticalSection critical_section_rtt_;
int64_t rtt_ms_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 708b9af1e09..7e0ac312c80 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <map>
+#include <memory>
#include <set>
#include "testing/gmock/include/gmock/gmock.h"
@@ -68,7 +69,7 @@ class SendTransport : public Transport,
size_t len,
const PacketOptions& options) override {
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
++rtp_packets_sent_;
last_rtp_header_ = header;
@@ -115,10 +116,10 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
RtcpPacketTypeCounter packets_sent_;
RtcpPacketTypeCounter packets_received_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
SendTransport transport_;
RtcpRttStatsTestImpl rtt_stats_;
- rtc::scoped_ptr<ModuleRtpRtcpImpl> impl_;
+ std::unique_ptr<ModuleRtpRtcpImpl> impl_;
uint32_t remote_ssrc_;
void SetRemoteSsrc(uint32_t ssrc) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index 3fbca7b67d8..b58a94d457f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -17,6 +17,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
@@ -24,8 +25,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/modules/rtp_rtcp/source/time_util.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -114,12 +113,11 @@ RTPSender::RTPSender(
BitrateStatisticsObserver* bitrate_callback,
FrameCountObserver* frame_count_observer,
SendSideDelayObserver* send_side_delay_observer,
- RtcEventLog* event_log)
+ RtcEventLog* event_log,
+ SendPacketObserver* send_packet_observer)
: clock_(clock),
- // TODO(holmer): Remove this conversion when we remove the use of
- // TickTime.
- clock_delta_ms_(clock_->TimeInMilliseconds() -
- TickTime::MillisecondTimestamp()),
+ // TODO(holmer): Remove this conversion?
+ clock_delta_ms_(clock_->TimeInMilliseconds() - rtc::TimeMillis()),
random_(clock_->TimeInMicroseconds()),
bitrates_(bitrate_callback),
total_bitrate_sent_(clock, bitrates_.total_bitrate_observer()),
@@ -147,11 +145,11 @@ RTPSender::RTPSender(
nack_bitrate_(clock, bitrates_.retransmit_bitrate_observer()),
packet_history_(clock),
// Statistics
- statistics_crit_(CriticalSectionWrapper::CreateCriticalSection()),
rtp_stats_callback_(NULL),
frame_count_observer_(frame_count_observer),
send_side_delay_observer_(send_side_delay_observer),
event_log_(event_log),
+ send_packet_observer_(send_packet_observer),
// RTP variables
start_timestamp_forced_(false),
start_timestamp_(0),
@@ -166,7 +164,6 @@ RTPSender::RTPSender(
last_packet_marker_bit_(false),
csrcs_(),
rtx_(kRtxOff),
- target_bitrate_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
target_bitrate_(0) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
@@ -210,12 +207,12 @@ RTPSender::~RTPSender() {
}
void RTPSender::SetTargetBitrate(uint32_t bitrate) {
- CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ rtc::CritScope cs(&target_bitrate_critsect_);
target_bitrate_ = bitrate;
}
uint32_t RTPSender::GetTargetBitrate() {
- CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ rtc::CritScope cs(&target_bitrate_critsect_);
return target_bitrate_;
}
@@ -532,7 +529,7 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
payload_size, fragmentation, rtp_hdr);
}
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
// Note: This is currently only counting for video.
if (frame_type == kVideoFrameKey) {
++frame_counts_.key_frames;
@@ -675,13 +672,12 @@ size_t RTPSender::SendPadData(size_t bytes,
UpdateAbsoluteSendTime(padding_packet, length, rtp_header, now_ms);
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(padding_packet, length, rtp_header);
- }
-
- if (using_transport_seq && transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, padding_packet,
+ length, rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
+ }
}
if (!SendPacketToNetwork(padding_packet, length, options))
@@ -886,9 +882,7 @@ bool RTPSender::TimeToSendPacket(uint16_t sequence_number,
// Packet cannot be found. Allow sending to continue.
return true;
}
- if (!retransmission && capture_time_ms > 0) {
- UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds());
- }
+
int rtx;
{
rtc::CritScope lock(&send_critsect_);
@@ -932,19 +926,18 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
diff_ms);
UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
- // TODO(sprang): Potentially too much overhead in IsRegistered()?
- bool using_transport_seq = rtp_header_extension_map_.IsRegistered(
- kRtpExtensionTransportSequenceNumber) &&
- transport_sequence_number_allocator_;
-
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(buffer_to_send_ptr, length, rtp_header);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, buffer_to_send_ptr,
+ length, rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
+ }
}
- if (using_transport_seq && transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (!is_retransmit && !send_over_rtx) {
+ UpdateDelayStatistics(capture_time_ms, now_ms);
+ UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc);
}
bool ret = SendPacketToNetwork(buffer_to_send_ptr, length, options);
@@ -966,7 +959,7 @@ void RTPSender::UpdateRtpStats(const uint8_t* buffer,
// Get ssrc before taking statistics_crit_ to avoid possible deadlock.
uint32_t ssrc = is_rtx ? RtxSsrc() : SSRC();
- CriticalSectionScoped lock(statistics_crit_.get());
+ rtc::CritScope lock(&statistics_crit_);
if (is_rtx) {
counters = &rtx_rtp_stats_;
} else {
@@ -1061,23 +1054,17 @@ int32_t RTPSender::SendToNetwork(uint8_t* buffer,
}
return 0;
}
- if (capture_time_ms > 0) {
- UpdateDelayStatistics(capture_time_ms, now_ms);
- }
-
- // TODO(sprang): Potentially too much overhead in IsRegistered()?
- bool using_transport_seq = rtp_header_extension_map_.IsRegistered(
- kRtpExtensionTransportSequenceNumber) &&
- transport_sequence_number_allocator_;
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(buffer, length, rtp_header);
- if (transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, buffer, length,
+ rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
}
}
+ UpdateDelayStatistics(capture_time_ms, now_ms);
+ UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc);
bool sent = SendPacketToNetwork(buffer, length, options);
@@ -1098,7 +1085,7 @@ int32_t RTPSender::SendToNetwork(uint8_t* buffer,
}
void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
- if (!send_side_delay_observer_)
+ if (!send_side_delay_observer_ || capture_time_ms <= 0)
return;
uint32_t ssrc;
@@ -1109,7 +1096,7 @@ void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
ssrc = ssrc_;
}
{
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
// TODO(holmer): Compute this iteratively instead.
send_delays_[now_ms] = now_ms - capture_time_ms;
send_delays_.erase(send_delays_.begin(),
@@ -1130,6 +1117,15 @@ void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
ssrc);
}
+void RTPSender::UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) {
+ if (!send_packet_observer_ || capture_time_ms <= 0 || packet_id == -1)
+ return;
+
+ send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc);
+}
+
void RTPSender::ProcessBitrate() {
rtc::CritScope lock(&send_critsect_);
total_bitrate_sent_.Process();
@@ -1157,7 +1153,7 @@ uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) {
void RTPSender::GetDataCounters(StreamDataCounters* rtp_stats,
StreamDataCounters* rtx_stats) const {
- CriticalSectionScoped lock(statistics_crit_.get());
+ rtc::CritScope lock(&statistics_crit_);
*rtp_stats = rtp_stats_;
*rtx_stats = rtx_rtp_stats_;
}
@@ -1613,7 +1609,8 @@ void RTPSender::UpdateAbsoluteSendTime(uint8_t* rtp_packet,
ConvertMsTo24Bits(now_ms));
}
-uint16_t RTPSender::UpdateTransportSequenceNumber(
+bool RTPSender::UpdateTransportSequenceNumber(
+ uint16_t sequence_number,
uint8_t* rtp_packet,
size_t rtp_packet_length,
const RTPHeader& rtp_header) const {
@@ -1624,19 +1621,26 @@ uint16_t RTPSender::UpdateTransportSequenceNumber(
rtp_packet_length, rtp_header,
kTransportSequenceNumberLength, &offset)) {
case ExtensionStatus::kNotRegistered:
- return 0;
+ return false;
case ExtensionStatus::kError:
LOG(LS_WARNING) << "Failed to update transport sequence number";
- return 0;
+ return false;
case ExtensionStatus::kOk:
break;
default:
RTC_NOTREACHED();
}
- uint16_t seq = transport_sequence_number_allocator_->AllocateSequenceNumber();
- BuildTransportSequenceNumberExtension(rtp_packet + offset, seq);
- return seq;
+ BuildTransportSequenceNumberExtension(rtp_packet + offset, sequence_number);
+ return true;
+}
+
+bool RTPSender::AllocateTransportSequenceNumber(int* packet_id) const {
+ if (!transport_sequence_number_allocator_)
+ return false;
+
+ *packet_id = transport_sequence_number_allocator_->AllocateSequenceNumber();
+ return true;
}
void RTPSender::SetSendingStatus(bool enabled) {
@@ -1858,12 +1862,12 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,
void RTPSender::RegisterRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
rtp_stats_callback_ = callback;
}
StreamDataCountersCallback* RTPSender::GetRtpStatisticsCallback() const {
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
return rtp_stats_callback_;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index 4344df67451..f501d27a723 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -13,9 +13,11 @@
#include <list>
#include <map>
+#include <memory>
#include <utility>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/random.h"
#include "webrtc/base/thread_annotations.h"
@@ -95,7 +97,9 @@ class RTPSender : public RTPSenderInterface {
BitrateStatisticsObserver* bitrate_callback,
FrameCountObserver* frame_count_observer,
SendSideDelayObserver* send_side_delay_observer,
- RtcEventLog* event_log);
+ RtcEventLog* event_log,
+ SendPacketObserver* send_packet_observer);
+
virtual ~RTPSender();
void ProcessBitrate();
@@ -351,6 +355,9 @@ class RTPSender : public RTPSenderInterface {
const PacketOptions& options);
void UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms);
+ void UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc);
// Find the byte position of the RTP extension as indicated by |type| in
// |rtp_packet|. Return false if such extension doesn't exist.
@@ -368,12 +375,13 @@ class RTPSender : public RTPSenderInterface {
size_t rtp_packet_length,
const RTPHeader& rtp_header,
int64_t now_ms) const;
- // Update the transport sequence number of the packet using a new sequence
- // number allocated by SequenceNumberAllocator. Returns the assigned sequence
- // number, or 0 if extension could not be updated.
- uint16_t UpdateTransportSequenceNumber(uint8_t* rtp_packet,
- size_t rtp_packet_length,
- const RTPHeader& rtp_header) const;
+
+ bool UpdateTransportSequenceNumber(uint16_t sequence_number,
+ uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const RTPHeader& rtp_header) const;
+
+ bool AllocateTransportSequenceNumber(int* packet_id) const;
void UpdateRtpStats(const uint8_t* buffer,
size_t packet_length,
@@ -422,8 +430,8 @@ class RTPSender : public RTPSenderInterface {
Bitrate total_bitrate_sent_;
const bool audio_configured_;
- const rtc::scoped_ptr<RTPSenderAudio> audio_;
- const rtc::scoped_ptr<RTPSenderVideo> video_;
+ const std::unique_ptr<RTPSenderAudio> audio_;
+ const std::unique_ptr<RTPSenderVideo> video_;
RtpPacketSender* const paced_sender_;
TransportSequenceNumberAllocator* const transport_sequence_number_allocator_;
@@ -454,7 +462,7 @@ class RTPSender : public RTPSenderInterface {
RTPPacketHistory packet_history_;
// Statistics
- rtc::scoped_ptr<CriticalSectionWrapper> statistics_crit_;
+ rtc::CriticalSection statistics_crit_;
SendDelayMap send_delays_ GUARDED_BY(statistics_crit_);
FrameCounts frame_counts_ GUARDED_BY(statistics_crit_);
StreamDataCounters rtp_stats_ GUARDED_BY(statistics_crit_);
@@ -463,6 +471,7 @@ class RTPSender : public RTPSenderInterface {
FrameCountObserver* const frame_count_observer_;
SendSideDelayObserver* const send_side_delay_observer_;
RtcEventLog* const event_log_;
+ SendPacketObserver* const send_packet_observer_;
// RTP variables
bool start_timestamp_forced_ GUARDED_BY(send_critsect_);
@@ -489,7 +498,7 @@ class RTPSender : public RTPSenderInterface {
// SetTargetBitrateKbps or GetTargetBitrateKbps. Also remember
// that by the time the function returns there is no guarantee
// that the target bitrate is still valid.
- rtc::scoped_ptr<CriticalSectionWrapper> target_bitrate_critsect_;
+ rtc::CriticalSection target_bitrate_critsect_;
uint32_t target_bitrate_ GUARDED_BY(target_bitrate_critsect_);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSender);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index 804294ac540..4236e1f37d4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -12,10 +12,11 @@
#include <string.h>
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -24,7 +25,6 @@ static const int kDtmfFrequencyHz = 8000;
RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtpSender)
: _clock(clock),
_rtpSender(rtpSender),
- _sendAudioCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_packetSizeSamples(160),
_dtmfEventIsOn(false),
_dtmfEventFirstPacketSent(false),
@@ -53,7 +53,7 @@ int RTPSenderAudio::AudioFrequency() const {
// set audio packet size, used to determine when it's time to send a DTMF packet
// in silence (CNG)
int32_t RTPSenderAudio::SetAudioPacketSize(uint16_t packetSizeSamples) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_packetSizeSamples = packetSizeSamples;
return 0;
@@ -67,7 +67,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
const uint32_t rate,
RtpUtility::Payload** payload) {
if (RtpUtility::StringCompare(payloadName, "cn", 2)) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// we can have multiple CNG payload types
switch (frequency) {
case 8000:
@@ -86,7 +86,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
return -1;
}
} else if (RtpUtility::StringCompare(payloadName, "telephone-event", 15)) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// Don't add it to the list
// we dont want to allow send with a DTMF payloadtype
_dtmfPayloadType = payloadType;
@@ -104,7 +104,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
}
bool RTPSenderAudio::MarkerBit(FrameType frameType, int8_t payload_type) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// for audio true for first packet in a speech burst
bool markerBit = false;
if (_lastPayloadType != payload_type) {
@@ -162,7 +162,7 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
int8_t dtmf_payload_type;
uint16_t packet_size_samples;
{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
red_payload_type = _REDPayloadType;
audio_level_dbov = _audioLevel_dBov;
dtmf_payload_type = _dtmfPayloadType;
@@ -333,8 +333,9 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize);
}
}
+
{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_lastPayloadType = payloadType;
}
// Update audio level extension, if included.
@@ -348,10 +349,14 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp",
_rtpSender->Timestamp(), "seqnum",
_rtpSender->SequenceNumber());
- return _rtpSender->SendToNetwork(dataBuffer, payloadSize, rtpHeaderLength,
- TickTime::MillisecondTimestamp(),
- kAllowRetransmission,
- RtpPacketSender::kHighPriority);
+ int32_t send_result = _rtpSender->SendToNetwork(
+ dataBuffer, payloadSize, rtpHeaderLength,
+ rtc::TimeMillis(), kAllowRetransmission,
+ RtpPacketSender::kHighPriority);
+ if (first_packet_sent_()) {
+ LOG(LS_INFO) << "First audio RTP packet sent to pacer";
+ }
+ return send_result;
}
// Audio level magnitude and voice activity flag are set for each RTP packet
@@ -359,7 +364,7 @@ int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) {
if (level_dBov > 127) {
return -1;
}
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_audioLevel_dBov = level_dBov;
return 0;
}
@@ -369,14 +374,14 @@ int32_t RTPSenderAudio::SetRED(int8_t payloadType) {
if (payloadType < -1) {
return -1;
}
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_REDPayloadType = payloadType;
return 0;
}
// Get payload type for Redundant Audio Data RFC 2198
int32_t RTPSenderAudio::RED(int8_t* payloadType) const {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
if (_REDPayloadType == -1) {
// not configured
return -1;
@@ -390,7 +395,7 @@ int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key,
uint16_t time_ms,
uint8_t level) {
{
- CriticalSectionScoped lock(_sendAudioCritsect.get());
+ rtc::CritScope lock(&_sendAudioCritsect);
if (_dtmfPayloadType < 0) {
// TelephoneEvent payloadtype not configured
return -1;
@@ -445,7 +450,7 @@ int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended,
"Audio::SendTelephoneEvent", "timestamp",
dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber());
retVal = _rtpSender->SendToNetwork(
- dtmfbuffer, 4, 12, TickTime::MillisecondTimestamp(),
+ dtmfbuffer, 4, 12, rtc::TimeMillis(),
kAllowRetransmission, RtpPacketSender::kHighPriority);
sendCount--;
} while (sendCount > 0 && retVal == 0);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index 25c5e4dd88a..4bc0266b7d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -12,6 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
#include "webrtc/common_types.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/source/dtmf_queue.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
@@ -72,7 +74,7 @@ class RTPSenderAudio : public DTMFqueue {
Clock* const _clock;
RTPSender* const _rtpSender;
- rtc::scoped_ptr<CriticalSectionWrapper> _sendAudioCritsect;
+ rtc::CriticalSection _sendAudioCritsect;
uint16_t _packetSizeSamples GUARDED_BY(_sendAudioCritsect);
@@ -100,6 +102,7 @@ class RTPSenderAudio : public DTMFqueue {
// Audio level indication
// (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
uint8_t _audioLevel_dBov GUARDED_BY(_sendAudioCritsect);
+ OneTimeEvent first_packet_sent_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index b7238d26a22..d04ff4d200a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -8,17 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for the RTPSender.
- */
-
#include <list>
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/buffer.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call/mock/mock_rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
@@ -126,6 +122,11 @@ class MockTransportSequenceNumberAllocator
MOCK_METHOD0(AllocateSequenceNumber, uint16_t());
};
+class MockSendPacketObserver : public SendPacketObserver {
+ public:
+ MOCK_METHOD3(OnSendPacket, void(uint16_t, int64_t, uint32_t));
+};
+
class RtpSenderTest : public ::testing::Test {
protected:
RtpSenderTest()
@@ -141,10 +142,10 @@ class RtpSenderTest : public ::testing::Test {
void SetUp() override { SetUpRtpSender(true); }
void SetUpRtpSender(bool pacer) {
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- pacer ? &mock_paced_sender_ : nullptr,
- &seq_num_allocator_, nullptr, nullptr,
- nullptr, nullptr, &mock_rtc_event_log_));
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, pacer ? &mock_paced_sender_ : nullptr,
+ &seq_num_allocator_, nullptr, nullptr, nullptr, nullptr,
+ &mock_rtc_event_log_, &send_packet_observer_));
rtp_sender_->SetSequenceNumber(kSeqNum);
}
@@ -152,7 +153,8 @@ class RtpSenderTest : public ::testing::Test {
MockRtcEventLog mock_rtc_event_log_;
MockRtpPacketSender mock_paced_sender_;
MockTransportSequenceNumberAllocator seq_num_allocator_;
- rtc::scoped_ptr<RTPSender> rtp_sender_;
+ MockSendPacketObserver send_packet_observer_;
+ std::unique_ptr<RTPSender> rtp_sender_;
int payload_;
LoopbackTransportTest transport_;
const bool kMarkerBit;
@@ -189,6 +191,20 @@ class RtpSenderTest : public ::testing::Test {
packet_, payload_length, rtp_length, capture_time_ms,
kAllowRetransmission, RtpPacketSender::kNormalPriority));
}
+
+ void SendGenericPayload() {
+ const uint8_t kPayload[] = {47, 11, 32, 93, 89};
+ const uint32_t kTimestamp = 1234;
+ const uint8_t kPayloadType = 127;
+ const int64_t kCaptureTimeMs = fake_clock_.TimeInMilliseconds();
+ char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
+ EXPECT_EQ(0, rtp_sender_->RegisterPayload(payload_name, kPayloadType, 90000,
+ 0, 1500));
+
+ EXPECT_EQ(0, rtp_sender_->SendOutgoingData(
+ kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs,
+ kPayload, sizeof(kPayload), nullptr));
+ }
};
// TODO(pbos): Move tests over from WithoutPacer to RtpSenderTest as this is our
@@ -206,7 +222,7 @@ class RtpSenderVideoTest : public RtpSenderTest {
rtp_sender_video_.reset(
new RTPSenderVideo(&fake_clock_, rtp_sender_.get()));
}
- rtc::scoped_ptr<RTPSenderVideo> rtp_sender_video_;
+ std::unique_ptr<RTPSenderVideo> rtp_sender_video_;
void VerifyCVOPacket(uint8_t* data,
size_t len,
@@ -483,21 +499,13 @@ TEST_F(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
kRtpExtensionTransportSequenceNumber,
kTransportSequenceNumberExtensionId));
- char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
- const uint8_t payload_type = 127;
- ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
- 0, 1500));
- // Create a dummy payload of 5 bytes.
- uint8_t payload[] = {47, 11, 32, 93, 89};
-
- const uint16_t kTransportSequenceNumber = 17;
EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
.WillOnce(testing::Return(kTransportSequenceNumber));
- const uint32_t kTimestamp = 1234;
- const int64_t kCaptureTimeMs = 4321;
- ASSERT_EQ(0, rtp_sender_->SendOutgoingData(
- kVideoFrameKey, payload_type, kTimestamp, kCaptureTimeMs,
- payload, sizeof(payload), nullptr));
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+
+ SendGenericPayload();
RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
@@ -513,6 +521,19 @@ TEST_F(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
rtp_header.extension.transportSequenceNumber);
}
+TEST_F(RtpSenderTestWithoutPacer, OnSendPacketUpdated) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+
+ SendGenericPayload();
+}
+
// Test CVO header extension is only set when marker bit is true.
TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithVideoRotation_MarkerBit) {
rtp_sender_->SetVideoRotation(kRotation);
@@ -853,7 +874,7 @@ TEST_F(RtpSenderTest, SendPadding) {
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
// Create and set up parser.
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
@@ -951,11 +972,66 @@ TEST_F(RtpSenderTest, SendPadding) {
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
}
+TEST_F(RtpSenderTest, OnSendPacketUpdated) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = false;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
+TEST_F(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(_, _, _)).Times(0);
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = true;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
+TEST_F(RtpSenderTest, OnSendPacketNotUpdatedWithoutSeqNumAllocator) {
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, &mock_paced_sender_,
+ nullptr /* TransportSequenceNumberAllocator */, nullptr, nullptr, nullptr,
+ nullptr, nullptr, &send_packet_observer_));
+ rtp_sender_->SetSequenceNumber(kSeqNum);
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(_, _, _)).Times(0);
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = false;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
TEST_F(RtpSenderTest, SendRedundantPayloads) {
MockTransport transport;
rtp_sender_.reset(new RTPSender(
- false, &fake_clock_, &transport, &mock_paced_sender_, nullptr,
- nullptr, nullptr, nullptr, nullptr, &mock_rtc_event_log_));
+ false, &fake_clock_, &transport, &mock_paced_sender_, nullptr, nullptr,
+ nullptr, nullptr, nullptr, &mock_rtc_event_log_, nullptr));
+
rtp_sender_->SetSequenceNumber(kSeqNum);
rtp_sender_->SetRtxPayloadType(kRtxPayload, kPayload);
@@ -972,7 +1048,7 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
rtp_sender_->SetRtxSsrc(1234);
// Create and set up parser.
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
@@ -1096,9 +1172,9 @@ TEST_F(RtpSenderTest, FrameCountCallbacks) {
FrameCounts frame_counts_;
} callback;
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- &mock_paced_sender_, nullptr, nullptr,
- nullptr, &callback, nullptr, nullptr));
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, &mock_paced_sender_, nullptr, nullptr,
+ nullptr, &callback, nullptr, nullptr, nullptr));
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
const uint8_t payload_type = 127;
@@ -1152,8 +1228,8 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
BitrateStatistics total_stats_;
BitrateStatistics retransmit_stats_;
} callback;
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- nullptr, nullptr, nullptr, &callback, nullptr,
+ rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_, nullptr,
+ nullptr, nullptr, &callback, nullptr, nullptr,
nullptr, nullptr));
// Simulate kNumPackets sent with kPacketInterval ms intervals.
@@ -1205,7 +1281,7 @@ class RtpSenderAudioTest : public RtpSenderTest {
void SetUp() override {
payload_ = kAudioPayload;
- rtp_sender_.reset(new RTPSender(true, &fake_clock_, &transport_,
+ rtp_sender_.reset(new RTPSender(true, &fake_clock_, &transport_, nullptr,
nullptr, nullptr, nullptr, nullptr, nullptr,
nullptr, nullptr));
rtp_sender_->SetSequenceNumber(kSeqNum);
@@ -1297,7 +1373,6 @@ TEST_F(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
fec_params.fec_mask_type = kFecMaskRandom;
fec_params.fec_rate = 1;
fec_params.max_fec_frames = 1;
- fec_params.use_uep_protection = false;
rtp_sender_->SetFecParameters(&fec_params, &fec_params);
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta, payload_type,
1234, 4321, payload,
@@ -1407,7 +1482,7 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kEmptyFrame, payload_type,
capture_time_ms + 2000, 0, nullptr,
0, nullptr));
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
webrtc::RTPHeader rtp_header;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 32ba26f54b8..e10b5b2edab 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -13,6 +13,7 @@
#include <stdlib.h>
#include <string.h>
+#include <memory>
#include <vector>
#include "webrtc/base/checks.h"
@@ -24,21 +25,19 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
enum { REDForFECHeaderLength = 1 };
RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
: _rtpSender(*rtpSender),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_retransmissionSettings(kRetransmitBaseLayer),
// Generic FEC
fec_(),
fec_enabled_(false),
- red_payload_type_(-1),
- fec_payload_type_(-1),
+ red_payload_type_(0),
+ fec_payload_type_(0),
delta_fec_params_(),
key_fec_params_(),
producer_fec_(&fec_),
@@ -113,13 +112,13 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
int64_t capture_time_ms,
StorageType media_packet_storage,
bool protect) {
- rtc::scoped_ptr<RedPacket> red_packet;
+ std::unique_ptr<RedPacket> red_packet;
std::vector<RedPacket*> fec_packets;
StorageType fec_storage = kDontRetransmit;
uint16_t next_fec_sequence_number = 0;
{
// Only protect while creating RED and FEC packets, not when sending.
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
red_packet.reset(producer_fec_.BuildRedPacket(
data_buffer, payload_length, rtp_header_length, red_payload_type_));
if (protect) {
@@ -170,7 +169,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
void RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
fec_enabled_ = enable;
red_payload_type_ = payloadTypeRED;
fec_payload_type_ = payloadTypeFEC;
@@ -184,33 +183,38 @@ void RTPSenderVideo::SetGenericFECStatus(const bool enable,
void RTPSenderVideo::GenericFECStatus(bool* enable,
uint8_t* payloadTypeRED,
uint8_t* payloadTypeFEC) const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
*enable = fec_enabled_;
*payloadTypeRED = red_payload_type_;
*payloadTypeFEC = fec_payload_type_;
}
size_t RTPSenderVideo::FECPacketOverhead() const {
- CriticalSectionScoped cs(crit_.get());
- if (fec_enabled_) {
+ rtc::CritScope cs(&crit_);
+ size_t overhead = 0;
+ if (red_payload_type_ != 0) {
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12 bytes base header (CSRC list, extensions...)
// This reason for the header extensions to be included here is that
// from an FEC viewpoint, they are part of the payload to be protected.
// (The base RTP header is already protected by the FEC header.)
- return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
- (_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
+ overhead = REDForFECHeaderLength + (_rtpSender.RTPHeaderLength() -
+ kRtpHeaderSize);
}
- return 0;
+ if (fec_enabled_)
+ overhead += ForwardErrorCorrection::PacketOverhead();
+ return overhead;
}
void RTPSenderVideo::SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
RTC_DCHECK(delta_params);
RTC_DCHECK(key_params);
- delta_fec_params_ = *delta_params;
- key_fec_params_ = *key_params;
+ if (fec_enabled_) {
+ delta_fec_params_ = *delta_params;
+ key_fec_params_ = *key_params;
+ }
}
int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
@@ -226,19 +230,20 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
return -1;
}
- rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
+ std::unique_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
videoType, _rtpSender.MaxDataPayloadLength(),
video_header ? &(video_header->codecHeader) : nullptr, frameType));
StorageType storage;
- bool fec_enabled;
+ int red_payload_type;
+ bool first_frame = first_frame_sent_();
{
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
FecProtectionParams* fec_params =
frameType == kVideoFrameKey ? &key_fec_params_ : &delta_fec_params_;
producer_fec_.SetFecParameters(fec_params, 0);
storage = packetizer->GetStorageType(_retransmissionSettings);
- fec_enabled = fec_enabled_;
+ red_payload_type = red_payload_type_;
}
// Register CVO rtp header extension at the first time when we receive a frame
@@ -260,6 +265,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
packetizer->SetPayloadData(data, payload_bytes_to_send, frag);
+ bool first = true;
bool last = false;
while (!last) {
uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
@@ -268,6 +274,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
&payload_bytes_in_packet, &last)) {
return -1;
}
+
// Write RTP header.
// Set marker bit true if this is the last packet in frame.
_rtpSender.BuildRTPheader(
@@ -299,7 +306,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
_rtpSender.UpdateVideoRotation(dataBuffer, packetSize, rtp_header,
video_header->rotation);
}
- if (fec_enabled) {
+ if (red_payload_type != 0) {
SendVideoPacketAsRed(dataBuffer, payload_bytes_in_packet,
rtp_header_length, _rtpSender.SequenceNumber(),
captureTimeStamp, capture_time_ms, storage,
@@ -309,6 +316,18 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
_rtpSender.SequenceNumber(), captureTimeStamp,
capture_time_ms, storage);
}
+
+ if (first_frame) {
+ if (first) {
+ LOG(LS_INFO)
+ << "Sent first RTP packet of the first video frame (pre-pacer)";
+ }
+ if (last) {
+ LOG(LS_INFO)
+ << "Sent last RTP packet of the first video frame (pre-pacer)";
+ }
+ }
+ first = false;
}
TRACE_EVENT_ASYNC_END1(
@@ -330,12 +349,12 @@ uint32_t RTPSenderVideo::FecOverheadRate() const {
}
int RTPSenderVideo::SelectiveRetransmissions() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return _retransmissionSettings;
}
void RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
_retransmissionSettings = settings;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index dc1088a3f7c..8307b83864d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -13,7 +13,8 @@
#include <list>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -27,7 +28,6 @@
#include "webrtc/typedefs.h"
namespace webrtc {
-class CriticalSectionWrapper;
class RTPSenderVideo {
public:
@@ -97,7 +97,7 @@ class RTPSenderVideo {
RTPSenderInterface& _rtpSender;
// Should never be held when calling out of this class.
- const rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ const rtc::CriticalSection crit_;
RtpVideoCodecTypes _videoType;
int32_t _retransmissionSettings GUARDED_BY(crit_);
@@ -116,6 +116,7 @@ class RTPSenderVideo {
Bitrate _fecOverheadRate;
// Bitrate used for video payload and RTP headers
Bitrate _videoBitrate;
+ OneTimeEvent first_frame_sent_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index bdae3c4806f..439cd01a9aa 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -319,6 +319,13 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
return;
}
+ if (ptrRTPDataExtensionEnd - ptr < (len + 1)) {
+ LOG(LS_WARNING) << "Incorrect one-byte extension len: " << (len + 1)
+ << ", bytes left in buffer: "
+ << (ptrRTPDataExtensionEnd - ptr);
+ return;
+ }
+
RTPExtensionType type;
if (ptrExtensionMap->GetType(id, &type) != 0) {
// If we encounter an unknown extension, just skip over it.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
index 1a242853952..474bc6e04e0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
@@ -52,11 +52,6 @@ class RtpHeaderParser {
bool ParseRtcp(RTPHeader* header) const;
bool Parse(RTPHeader* parsedPacket,
RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const;
- RTC_DEPRECATED bool Parse(
- RTPHeader& parsedPacket, // NOLINT(runtime/references)
- RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const {
- return Parse(&parsedPacket, ptrExtensionMap);
- }
private:
void ParseOneByteExtensionHeader(RTPHeader* parsedPacket,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
index f1d1549e279..a96d05db468 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
@@ -9,9 +9,8 @@
*/
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
-
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/checks.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -45,7 +44,7 @@ void SSRCDatabase::ReturnSSRC(uint32_t ssrc) {
ssrcs_.erase(ssrc);
}
-SSRCDatabase::SSRCDatabase() : random_(TickTime::Now().Ticks()) {}
+SSRCDatabase::SSRCDatabase() : random_(rtc::TimeMicros()) {}
SSRCDatabase::~SSRCDatabase() {}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
index da43204b093..43d3a82ab28 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
@@ -67,10 +67,8 @@ void TMMBRSet::ClearEntry(uint32_t idx) {
}
TMMBRHelp::TMMBRHelp()
- : _criticalSection(CriticalSectionWrapper::CreateCriticalSection()),
- _candidateSet(),
+ : _candidateSet(),
_boundingSet(),
- _boundingSetToSend(),
_ptrIntersectionBoundingSet(NULL),
_ptrMaxPRBoundingSet(NULL) {
}
@@ -80,13 +78,12 @@ TMMBRHelp::~TMMBRHelp() {
delete [] _ptrMaxPRBoundingSet;
_ptrIntersectionBoundingSet = 0;
_ptrMaxPRBoundingSet = 0;
- delete _criticalSection;
}
TMMBRSet*
TMMBRHelp::VerifyAndAllocateBoundingSet(uint32_t minimumSize)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if(minimumSize > _boundingSet.capacity())
{
@@ -107,43 +104,10 @@ TMMBRSet* TMMBRHelp::BoundingSet() {
return &_boundingSet;
}
-int32_t
-TMMBRHelp::SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend)
-{
- CriticalSectionScoped lock(_criticalSection);
-
- if (boundingSetToSend == NULL)
- {
- _boundingSetToSend.clearSet();
- return 0;
- }
-
- VerifyAndAllocateBoundingSetToSend(boundingSetToSend->lengthOfSet());
- _boundingSetToSend.clearSet();
- for (uint32_t i = 0; i < boundingSetToSend->lengthOfSet(); i++)
- {
- // cap at our configured max bitrate
- uint32_t bitrate = boundingSetToSend->Tmmbr(i);
- _boundingSetToSend.SetEntry(i, bitrate,
- boundingSetToSend->PacketOH(i),
- boundingSetToSend->Ssrc(i));
- }
- return 0;
-}
-
-int32_t
-TMMBRHelp::VerifyAndAllocateBoundingSetToSend(uint32_t minimumSize)
-{
- CriticalSectionScoped lock(_criticalSection);
-
- _boundingSetToSend.VerifyAndAllocateSet(minimumSize);
- return 0;
-}
-
TMMBRSet*
TMMBRHelp::VerifyAndAllocateCandidateSet(uint32_t minimumSize)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
_candidateSet.VerifyAndAllocateSet(minimumSize);
return &_candidateSet;
@@ -155,16 +119,10 @@ TMMBRHelp::CandidateSet()
return &_candidateSet;
}
-TMMBRSet*
-TMMBRHelp::BoundingSetToSend()
-{
- return &_boundingSetToSend;
-}
-
int32_t
TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
// Work on local variable, will be modified
TMMBRSet candidateSet;
@@ -207,7 +165,7 @@ TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
int32_t
TMMBRHelp::FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
uint32_t numBoundingSet = 0;
VerifyAndAllocateBoundingSet(candidateSet.capacity());
@@ -412,7 +370,7 @@ TMMBRHelp::FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet)
bool TMMBRHelp::IsOwner(const uint32_t ssrc,
const uint32_t length) const {
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if (length == 0) {
// Empty bounding set.
@@ -428,7 +386,7 @@ bool TMMBRHelp::IsOwner(const uint32_t ssrc,
}
bool TMMBRHelp::CalcMinBitRate( uint32_t* minBitrateKbit) const {
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if (_candidateSet.size() == 0) {
// Empty bounding set.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
index 6236d5d43bd..ffafb1409b8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
@@ -12,8 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -63,11 +63,9 @@ public:
TMMBRSet* BoundingSet(); // used for debuging
TMMBRSet* CandidateSet();
- TMMBRSet* BoundingSetToSend();
TMMBRSet* VerifyAndAllocateCandidateSet(const uint32_t minimumSize);
int32_t FindTMMBRBoundingSet(TMMBRSet*& boundingSet);
- int32_t SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend);
bool IsOwner(const uint32_t ssrc, const uint32_t length) const;
@@ -75,15 +73,12 @@ public:
protected:
TMMBRSet* VerifyAndAllocateBoundingSet(uint32_t minimumSize);
- int32_t VerifyAndAllocateBoundingSetToSend(uint32_t minimumSize);
-
int32_t FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet);
private:
- CriticalSectionWrapper* _criticalSection;
+ rtc::CriticalSection _criticalSection;
TMMBRSet _candidateSet;
TMMBRSet _boundingSet;
- TMMBRSet _boundingSetToSend;
float* _ptrIntersectionBoundingSet;
float* _ptrMaxPRBoundingSet;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
index 67e8a65c4df..89c9cbebeba 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
#include <algorithm>
+#include <memory>
#include <vector>
#include "webrtc/test/null_transport.h"
@@ -41,7 +42,7 @@ bool LoopBackTransport::SendRtp(const uint8_t* data,
}
}
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return false;
}
@@ -100,9 +101,9 @@ class RtpRtcpAPITest : public ::testing::Test {
&fake_clock_, NULL, NULL, rtp_payload_registry_.get()));
}
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
- rtc::scoped_ptr<RtpRtcp> module_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpRtcp> module_;
uint32_t test_ssrc_;
uint32_t test_timestamp_;
uint16_t test_sequence_number_;
@@ -151,10 +152,6 @@ TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_TRUE(module_->TMMBR());
module_->SetTMMBRStatus(false);
EXPECT_FALSE(module_->TMMBR());
-
- EXPECT_EQ(kNackOff, rtp_receiver_->NACK());
- rtp_receiver_->SetNACKStatus(kNackRtcp);
- EXPECT_EQ(kNackRtcp, rtp_receiver_->NACK());
}
TEST_F(RtpRtcpAPITest, RtxSender) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
index d8040f79027..44de00a55f9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -11,7 +11,6 @@
#define WEBRTC_MODULES_RTP_RTCP_TEST_TESTAPI_TEST_API_H_
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
index 9b44c4f40db..8069b0950b6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
@@ -9,6 +9,7 @@
*/
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -135,12 +136,12 @@ class RtpRtcpAudioTest : public ::testing::Test {
RtpRtcp* module1;
RtpRtcp* module2;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics1_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics2_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver1_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver2_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics1_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics2_;
+ std::unique_ptr<RtpReceiver> rtp_receiver1_;
+ std::unique_ptr<RtpReceiver> rtp_receiver2_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
VerifyingAudioReceiver* data_receiver1;
VerifyingAudioReceiver* data_receiver2;
LoopBackTransport* transport1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
index d4b36412736..c1359df8646 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -9,6 +9,7 @@
*/
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
@@ -175,14 +176,14 @@ class RtpRtcpRtcpTest : public ::testing::Test {
delete receiver;
}
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback1_;
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback2_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics1_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics2_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver1_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver2_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback1_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback2_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics1_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics2_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
+ std::unique_ptr<RtpReceiver> rtp_receiver1_;
+ std::unique_ptr<RtpReceiver> rtp_receiver2_;
RtpRtcp* module1;
RtpRtcp* module2;
TestRtpReceiver* receiver;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
index 16ea540bd58..d84ff37be7a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -11,6 +11,7 @@
#include <stdlib.h>
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -55,7 +56,6 @@ class RtpRtcpVideoTest : public ::testing::Test {
video_module_->SetRTCPStatus(RtcpMode::kCompound);
video_module_->SetSSRC(test_ssrc_);
- rtp_receiver_->SetNACKStatus(kNackRtcp);
video_module_->SetStorePacketsStatus(true, 600);
EXPECT_EQ(0, video_module_->SetSendingStatus(true));
@@ -127,9 +127,9 @@ class RtpRtcpVideoTest : public ::testing::Test {
}
int test_id_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* video_module_;
LoopBackTransport* transport_;
TestRtpReceiver* receiver_;
@@ -170,7 +170,7 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
kPadSize);
++seq_num;
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(padding_packet, packet_size, &header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
index 466214c740e..b7c4ef5506b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
@@ -45,8 +45,9 @@
#include <math.h>
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
#include "webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -191,7 +192,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int RecoveredMediaPackets(int num_media_packets,
int num_fec_packets,
uint8_t* state) {
- rtc::scoped_ptr<uint8_t[]> state_tmp(
+ std::unique_ptr<uint8_t[]> state_tmp(
new uint8_t[num_media_packets + num_fec_packets]);
memcpy(state_tmp.get(), state, num_media_packets + num_fec_packets);
int num_recovered_packets = 0;
@@ -385,7 +386,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
// (which containes the code size parameters/protection length).
void ComputeMetricsForCode(CodeType code_type,
int code_index) {
- rtc::scoped_ptr<double[]> prob_weight(new double[kNumLossModels]);
+ std::unique_ptr<double[]> prob_weight(new double[kNumLossModels]);
memset(prob_weight.get() , 0, sizeof(double) * kNumLossModels);
MetricsFecCode metrics_code;
SetMetricsZero(&metrics_code);
@@ -393,7 +394,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int num_media_packets = code_params_[code_index].num_media_packets;
int num_fec_packets = code_params_[code_index].num_fec_packets;
int tot_num_packets = num_media_packets + num_fec_packets;
- rtc::scoped_ptr<uint8_t[]> state(new uint8_t[tot_num_packets]);
+ std::unique_ptr<uint8_t[]> state(new uint8_t[tot_num_packets]);
memset(state.get() , 0, tot_num_packets);
int num_loss_configurations = static_cast<int>(pow(2.0f, tot_num_packets));
diff --git a/chromium/third_party/webrtc/modules/utility/OWNERS b/chromium/third_party/webrtc/modules/utility/OWNERS
index 65cb70c9b90..9456ae08c20 100644
--- a/chromium/third_party/webrtc/modules/utility/OWNERS
+++ b/chromium/third_party/webrtc/modules/utility/OWNERS
@@ -1,4 +1,3 @@
-asapersson@webrtc.org
perkj@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
diff --git a/chromium/third_party/webrtc/modules/utility/include/file_recorder.h b/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
index 09ed8ae3507..480a4a97996 100644
--- a/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
+++ b/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
@@ -15,7 +15,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/media_file/media_file_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
@@ -58,8 +57,7 @@ public:
// Write frame to file. Frame should contain 10ms of un-ecoded audio data.
virtual int32_t RecordAudioToFile(
- const AudioFrame& frame,
- const TickTime* playoutTS = NULL) = 0;
+ const AudioFrame& frame) = 0;
// Open/create the file specified by fileName for writing audio/video data
// (relative path is allowed). audioCodecInst specifies the encoding of the
diff --git a/chromium/third_party/webrtc/modules/utility/include/jvm_android.h b/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
index f527dff6322..574c977cd04 100644
--- a/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
+++ b/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
@@ -12,9 +12,10 @@
#define WEBRTC_MODULES_UTILITY_INCLUDE_JVM_ANDROID_H_
#include <jni.h>
+
+#include <memory>
#include <string>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/utility/include/helpers_android.h"
@@ -76,7 +77,7 @@ class NativeRegistration : public JavaClass {
NativeRegistration(JNIEnv* jni, jclass clazz);
~NativeRegistration();
- rtc::scoped_ptr<GlobalRef> NewObject(
+ std::unique_ptr<GlobalRef> NewObject(
const char* name, const char* signature, ...);
private:
@@ -96,7 +97,7 @@ class JNIEnvironment {
// Note that the class name must be one of the names in the static
// |loaded_classes| array defined in jvm_android.cc.
// This method must be called on the construction thread.
- rtc::scoped_ptr<NativeRegistration> RegisterNatives(
+ std::unique_ptr<NativeRegistration> RegisterNatives(
const char* name, const JNINativeMethod *methods, int num_methods);
// Converts from Java string to std::string.
@@ -120,9 +121,9 @@ class JNIEnvironment {
// webrtc::JVM::Initialize(jvm, context);
//
// // Header (.h) file of example class called User.
-// rtc::scoped_ptr<JNIEnvironment> env;
-// rtc::scoped_ptr<NativeRegistration> reg;
-// rtc::scoped_ptr<GlobalRef> obj;
+// std::unique_ptr<JNIEnvironment> env;
+// std::unique_ptr<NativeRegistration> reg;
+// std::unique_ptr<GlobalRef> obj;
//
// // Construction (in .cc file) of User class.
// User::User() {
@@ -156,7 +157,7 @@ class JVM {
// Creates a JNIEnvironment object.
// This method returns a NULL pointer if AttachCurrentThread() has not been
// called successfully. Use the AttachCurrentThreadIfNeeded class if needed.
- rtc::scoped_ptr<JNIEnvironment> environment();
+ std::unique_ptr<JNIEnvironment> environment();
// Returns a JavaClass object given class |name|.
// Note that the class name must be one of the names in the static
diff --git a/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h b/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
index 9560e408e87..621fcee8182 100644
--- a/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
@@ -21,6 +21,10 @@ namespace webrtc {
class MockProcessThread : public ProcessThread {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
MOCK_METHOD0(Start, void());
MOCK_METHOD0(Stop, void());
MOCK_METHOD1(WakeUp, void(Module* module));
@@ -31,7 +35,7 @@ class MockProcessThread : public ProcessThread {
// MOCK_METHOD1 gets confused with mocking this method, so we work around it
// by overriding the method from the interface and forwarding the call to a
// mocked, simpler method.
- void PostTask(rtc::scoped_ptr<ProcessTask> task) override {
+ void PostTask(std::unique_ptr<ProcessTask> task) /* override */ {
PostTask(task.get());
}
};
diff --git a/chromium/third_party/webrtc/modules/utility/include/process_thread.h b/chromium/third_party/webrtc/modules/utility/include/process_thread.h
index 285a5ea5876..f6913ea3167 100644
--- a/chromium/third_party/webrtc/modules/utility/include/process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/include/process_thread.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
#define WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
+#include <memory>
+
#include "webrtc/typedefs.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
class Module;
@@ -29,7 +30,7 @@ class ProcessThread {
public:
virtual ~ProcessThread();
- static rtc::scoped_ptr<ProcessThread> Create(const char* thread_name);
+ static std::unique_ptr<ProcessThread> Create(const char* thread_name);
// Starts the worker thread. Must be called from the construction thread.
virtual void Start() = 0;
@@ -50,7 +51,7 @@ class ProcessThread {
// construction thread of the ProcessThread instance, if the task did not
// get a chance to run (e.g. posting the task while shutting down or when
// the thread never runs).
- virtual void PostTask(rtc::scoped_ptr<ProcessTask> task) = 0;
+ virtual void PostTask(std::unique_ptr<ProcessTask> task) = 0;
// Adds a module that will start to receive callbacks on the worker thread.
// Can be called from any thread.
diff --git a/chromium/third_party/webrtc/modules/utility/source/coder.cc b/chromium/third_party/webrtc/modules/utility/source/coder.cc
index 1476e02d9c5..3c065e7c2be 100644
--- a/chromium/third_party/webrtc/modules/utility/source/coder.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/coder.cc
@@ -13,96 +13,101 @@
#include "webrtc/modules/utility/source/coder.h"
namespace webrtc {
-AudioCoder::AudioCoder(uint32_t instanceID)
- : _acm(AudioCodingModule::Create(instanceID)),
- _receiveCodec(),
- _encodeTimestamp(0),
- _encodedData(NULL),
- _encodedLengthInBytes(0),
- _decodeTimestamp(0)
-{
- _acm->InitializeReceiver();
- _acm->RegisterTransportCallback(this);
+namespace {
+AudioCodingModule::Config GetAcmConfig(uint32_t id) {
+ AudioCodingModule::Config config;
+ // This class does not handle muted output.
+ config.neteq_config.enable_muted_state = false;
+ config.id = id;
+ return config;
}
+} // namespace
-AudioCoder::~AudioCoder()
-{
+AudioCoder::AudioCoder(uint32_t instance_id)
+ : acm_(AudioCodingModule::Create(GetAcmConfig(instance_id))),
+ receive_codec_(),
+ encode_timestamp_(0),
+ encoded_data_(nullptr),
+ encoded_length_in_bytes_(0),
+ decode_timestamp_(0) {
+ acm_->InitializeReceiver();
+ acm_->RegisterTransportCallback(this);
}
-int32_t AudioCoder::SetEncodeCodec(const CodecInst& codecInst) {
- const bool success = codec_manager_.RegisterEncoder(codecInst) &&
- codec_manager_.MakeEncoder(&rent_a_codec_, _acm.get());
+AudioCoder::~AudioCoder() {}
+
+int32_t AudioCoder::SetEncodeCodec(const CodecInst& codec_inst) {
+ const bool success = codec_manager_.RegisterEncoder(codec_inst) &&
+ codec_manager_.MakeEncoder(&rent_a_codec_, acm_.get());
return success ? 0 : -1;
}
-int32_t AudioCoder::SetDecodeCodec(const CodecInst& codecInst) {
- if (_acm->RegisterReceiveCodec(
- codecInst, [&] { return rent_a_codec_.RentIsacDecoder(); }) == -1) {
+int32_t AudioCoder::SetDecodeCodec(const CodecInst& codec_inst) {
+ if (acm_->RegisterReceiveCodec(
+ codec_inst, [&] { return rent_a_codec_.RentIsacDecoder(); }) == -1) {
return -1;
}
- memcpy(&_receiveCodec, &codecInst, sizeof(CodecInst));
+ memcpy(&receive_codec_, &codec_inst, sizeof(CodecInst));
return 0;
}
-int32_t AudioCoder::Decode(AudioFrame& decodedAudio,
- uint32_t sampFreqHz,
- const int8_t* incomingPayload,
- size_t payloadLength)
-{
- if (payloadLength > 0)
- {
- const uint8_t payloadType = _receiveCodec.pltype;
- _decodeTimestamp += _receiveCodec.pacsize;
- if(_acm->IncomingPayload((const uint8_t*) incomingPayload,
- payloadLength,
- payloadType,
- _decodeTimestamp) == -1)
- {
- return -1;
- }
+int32_t AudioCoder::Decode(AudioFrame& decoded_audio,
+ uint32_t samp_freq_hz,
+ const int8_t* incoming_payload,
+ size_t payload_length) {
+ if (payload_length > 0) {
+ const uint8_t payload_type = receive_codec_.pltype;
+ decode_timestamp_ += receive_codec_.pacsize;
+ if (acm_->IncomingPayload((const uint8_t*)incoming_payload, payload_length,
+ payload_type, decode_timestamp_) == -1) {
+ return -1;
}
- return _acm->PlayoutData10Ms((uint16_t)sampFreqHz, &decodedAudio);
+ }
+ bool muted;
+ int32_t ret =
+ acm_->PlayoutData10Ms((uint16_t)samp_freq_hz, &decoded_audio, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
}
-int32_t AudioCoder::PlayoutData(AudioFrame& decodedAudio,
- uint16_t& sampFreqHz)
-{
- return _acm->PlayoutData10Ms(sampFreqHz, &decodedAudio);
+int32_t AudioCoder::PlayoutData(AudioFrame& decoded_audio,
+ uint16_t& samp_freq_hz) {
+ bool muted;
+ int32_t ret = acm_->PlayoutData10Ms(samp_freq_hz, &decoded_audio, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
}
int32_t AudioCoder::Encode(const AudioFrame& audio,
- int8_t* encodedData,
- size_t& encodedLengthInBytes)
-{
- // Fake a timestamp in case audio doesn't contain a correct timestamp.
- // Make a local copy of the audio frame since audio is const
- AudioFrame audioFrame;
- audioFrame.CopyFrom(audio);
- audioFrame.timestamp_ = _encodeTimestamp;
- _encodeTimestamp += static_cast<uint32_t>(audioFrame.samples_per_channel_);
+ int8_t* encoded_data,
+ size_t& encoded_length_in_bytes) {
+ // Fake a timestamp in case audio doesn't contain a correct timestamp.
+ // Make a local copy of the audio frame since audio is const
+ AudioFrame audio_frame;
+ audio_frame.CopyFrom(audio);
+ audio_frame.timestamp_ = encode_timestamp_;
+ encode_timestamp_ += static_cast<uint32_t>(audio_frame.samples_per_channel_);
- // For any codec with a frame size that is longer than 10 ms the encoded
- // length in bytes should be zero until a a full frame has been encoded.
- _encodedLengthInBytes = 0;
- if(_acm->Add10MsData((AudioFrame&)audioFrame) == -1)
- {
- return -1;
- }
- _encodedData = encodedData;
- encodedLengthInBytes = _encodedLengthInBytes;
- return 0;
+ // For any codec with a frame size that is longer than 10 ms the encoded
+ // length in bytes should be zero until a a full frame has been encoded.
+ encoded_length_in_bytes_ = 0;
+ if (acm_->Add10MsData((AudioFrame&)audio_frame) == -1) {
+ return -1;
+ }
+ encoded_data_ = encoded_data;
+ encoded_length_in_bytes = encoded_length_in_bytes_;
+ return 0;
}
-int32_t AudioCoder::SendData(
- FrameType /* frameType */,
- uint8_t /* payloadType */,
- uint32_t /* timeStamp */,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* /* fragmentation*/)
-{
- memcpy(_encodedData,payloadData,sizeof(uint8_t) * payloadSize);
- _encodedLengthInBytes = payloadSize;
- return 0;
+int32_t AudioCoder::SendData(FrameType /* frame_type */,
+ uint8_t /* payload_type */,
+ uint32_t /* time_stamp */,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* /* fragmentation*/) {
+ memcpy(encoded_data_, payload_data, sizeof(uint8_t) * payload_size);
+ encoded_length_in_bytes_ = payload_size;
+ return 0;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/utility/source/coder.h b/chromium/third_party/webrtc/modules/utility/source/coder.h
index 9536a027d0e..5f441904bee 100644
--- a/chromium/third_party/webrtc/modules/utility/source/coder.h
+++ b/chromium/third_party/webrtc/modules/utility/source/coder.h
@@ -22,45 +22,47 @@
namespace webrtc {
class AudioFrame;
-class AudioCoder : public AudioPacketizationCallback
-{
-public:
- AudioCoder(uint32_t instanceID);
- ~AudioCoder();
+class AudioCoder : public AudioPacketizationCallback {
+ public:
+ AudioCoder(uint32_t instance_id);
+ ~AudioCoder();
- int32_t SetEncodeCodec(const CodecInst& codecInst);
+ int32_t SetEncodeCodec(const CodecInst& codec_inst);
- int32_t SetDecodeCodec(const CodecInst& codecInst);
+ int32_t SetDecodeCodec(const CodecInst& codec_inst);
- int32_t Decode(AudioFrame& decodedAudio, uint32_t sampFreqHz,
- const int8_t* incomingPayload, size_t payloadLength);
+ int32_t Decode(AudioFrame& decoded_audio,
+ uint32_t samp_freq_hz,
+ const int8_t* incoming_payload,
+ size_t payload_length);
- int32_t PlayoutData(AudioFrame& decodedAudio, uint16_t& sampFreqHz);
+ int32_t PlayoutData(AudioFrame& decoded_audio, uint16_t& samp_freq_hz);
- int32_t Encode(const AudioFrame& audio, int8_t* encodedData,
- size_t& encodedLengthInBytes);
+ int32_t Encode(const AudioFrame& audio,
+ int8_t* encoded_data,
+ size_t& encoded_length_in_bytes);
-protected:
- int32_t SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* fragmentation) override;
+ protected:
+ int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t time_stamp,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) override;
-private:
- std::unique_ptr<AudioCodingModule> _acm;
- acm2::CodecManager codec_manager_;
- acm2::RentACodec rent_a_codec_;
+ private:
+ std::unique_ptr<AudioCodingModule> acm_;
+ acm2::CodecManager codec_manager_;
+ acm2::RentACodec rent_a_codec_;
- CodecInst _receiveCodec;
+ CodecInst receive_codec_;
- uint32_t _encodeTimestamp;
- int8_t* _encodedData;
- size_t _encodedLengthInBytes;
+ uint32_t encode_timestamp_;
+ int8_t* encoded_data_;
+ size_t encoded_length_in_bytes_;
- uint32_t _decodeTimestamp;
+ uint32_t decode_timestamp_;
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
index beb6379ff0c..62887da13b8 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
@@ -19,7 +19,6 @@
#include "webrtc/modules/utility/include/file_player.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
index 88b20eeac2a..b0a766f22e1 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -130,8 +130,7 @@ bool FileRecorderImpl::IsRecording() const
}
int32_t FileRecorderImpl::RecordAudioToFile(
- const AudioFrame& incomingAudioFrame,
- const TickTime* playoutTS)
+ const AudioFrame& incomingAudioFrame)
{
if (codec_info_.plfreq == 0)
{
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
index 697d7593757..96f811d49ea 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
@@ -27,7 +27,6 @@
#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -45,23 +44,21 @@ public:
virtual ~FileRecorderImpl();
// FileRecorder functions.
- virtual int32_t RegisterModuleFileCallback(FileCallback* callback);
- virtual FileFormats RecordingFileFormat() const;
- virtual int32_t StartRecordingAudioFile(
+ int32_t RegisterModuleFileCallback(FileCallback* callback) override;
+ FileFormats RecordingFileFormat() const override;
+ int32_t StartRecordingAudioFile(
const char* fileName,
const CodecInst& codecInst,
uint32_t notificationTimeMs) override;
- virtual int32_t StartRecordingAudioFile(
+ int32_t StartRecordingAudioFile(
OutStream& destStream,
const CodecInst& codecInst,
uint32_t notificationTimeMs) override;
- virtual int32_t StopRecording();
- virtual bool IsRecording() const;
- virtual int32_t codec_info(CodecInst& codecInst) const;
- virtual int32_t RecordAudioToFile(
- const AudioFrame& frame,
- const TickTime* playoutTS = NULL);
- virtual int32_t StartRecordingVideoFile(
+ int32_t StopRecording() override;
+ bool IsRecording() const override;
+ int32_t codec_info(CodecInst& codecInst) const override;
+ int32_t RecordAudioToFile(const AudioFrame& frame) override;
+ int32_t StartRecordingVideoFile(
const char* fileName,
const CodecInst& audioCodecInst,
const VideoCodec& videoCodecInst,
@@ -69,7 +66,7 @@ public:
{
return -1;
}
- virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) {
+ int32_t RecordVideoToFile(const VideoFrame& videoFrame) override {
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc b/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
index eb37fda0405..d53d1b5eadf 100644
--- a/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
@@ -10,6 +10,8 @@
#include <android/log.h>
+#include <memory>
+
#include "webrtc/modules/utility/include/jvm_android.h"
#include "webrtc/base/checks.h"
@@ -139,7 +141,7 @@ NativeRegistration::~NativeRegistration() {
CHECK_EXCEPTION(jni_) << "Error during UnregisterNatives";
}
-rtc::scoped_ptr<GlobalRef> NativeRegistration::NewObject(
+std::unique_ptr<GlobalRef> NativeRegistration::NewObject(
const char* name, const char* signature, ...) {
ALOGD("NativeRegistration::NewObject%s", GetThreadInfo().c_str());
va_list args;
@@ -149,7 +151,7 @@ rtc::scoped_ptr<GlobalRef> NativeRegistration::NewObject(
args);
CHECK_EXCEPTION(jni_) << "Error during NewObjectV";
va_end(args);
- return rtc::scoped_ptr<GlobalRef>(new GlobalRef(jni_, obj));
+ return std::unique_ptr<GlobalRef>(new GlobalRef(jni_, obj));
}
// JavaClass implementation.
@@ -181,14 +183,14 @@ JNIEnvironment::~JNIEnvironment() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
}
-rtc::scoped_ptr<NativeRegistration> JNIEnvironment::RegisterNatives(
+std::unique_ptr<NativeRegistration> JNIEnvironment::RegisterNatives(
const char* name, const JNINativeMethod *methods, int num_methods) {
ALOGD("JNIEnvironment::RegisterNatives(%s)", name);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
jclass clazz = LookUpClass(name);
jni_->RegisterNatives(clazz, methods, num_methods);
CHECK_EXCEPTION(jni_) << "Error during RegisterNatives";
- return rtc::scoped_ptr<NativeRegistration>(
+ return std::unique_ptr<NativeRegistration>(
new NativeRegistration(jni_, clazz));
}
@@ -240,7 +242,7 @@ JVM::~JVM() {
DeleteGlobalRef(jni(), context_);
}
-rtc::scoped_ptr<JNIEnvironment> JVM::environment() {
+std::unique_ptr<JNIEnvironment> JVM::environment() {
ALOGD("JVM::environment%s", GetThreadInfo().c_str());
// The JNIEnv is used for thread-local storage. For this reason, we cannot
// share a JNIEnv between threads. If a piece of code has no other way to get
@@ -250,9 +252,9 @@ rtc::scoped_ptr<JNIEnvironment> JVM::environment() {
JNIEnv* jni = GetEnv(jvm_);
if (!jni) {
ALOGE("AttachCurrentThread() has not been called on this thread.");
- return rtc::scoped_ptr<JNIEnvironment>();
+ return std::unique_ptr<JNIEnvironment>();
}
- return rtc::scoped_ptr<JNIEnvironment>(new JNIEnvironment(jni));
+ return std::unique_ptr<JNIEnvironment>(new JNIEnvironment(jni));
}
JavaClass JVM::GetClass(const char* name) {
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
index 8cdf01634cb..4e3606ca08f 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
@@ -11,9 +11,9 @@
#include "webrtc/modules/utility/source/process_thread_impl.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -36,9 +36,9 @@ int64_t GetNextCallbackTime(Module* module, int64_t time_now) {
ProcessThread::~ProcessThread() {}
// static
-rtc::scoped_ptr<ProcessThread> ProcessThread::Create(
+std::unique_ptr<ProcessThread> ProcessThread::Create(
const char* thread_name) {
- return rtc::scoped_ptr<ProcessThread>(new ProcessThreadImpl(thread_name));
+ return std::unique_ptr<ProcessThread>(new ProcessThreadImpl(thread_name));
}
ProcessThreadImpl::ProcessThreadImpl(const char* thread_name)
@@ -119,7 +119,7 @@ void ProcessThreadImpl::WakeUp(Module* module) {
wake_up_->Set();
}
-void ProcessThreadImpl::PostTask(rtc::scoped_ptr<ProcessTask> task) {
+void ProcessThreadImpl::PostTask(std::unique_ptr<ProcessTask> task) {
// Allowed to be called on any thread.
{
rtc::CritScope lock(&lock_);
@@ -188,7 +188,7 @@ bool ProcessThreadImpl::Run(void* obj) {
}
bool ProcessThreadImpl::Process() {
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
int64_t next_checkpoint = now + (1000 * 60);
{
@@ -209,7 +209,7 @@ bool ProcessThreadImpl::Process() {
// Use a new 'now' reference to calculate when the next callback
// should occur. We'll continue to use 'now' above for the baseline
// of calculating how long we should wait, to reduce variance.
- int64_t new_now = TickTime::MillisecondTimestamp();
+ int64_t new_now = rtc::TimeMillis();
m.next_callback = GetNextCallbackTime(m.module, new_now);
}
@@ -227,7 +227,7 @@ bool ProcessThreadImpl::Process() {
}
}
- int64_t time_to_wait = next_checkpoint - TickTime::MillisecondTimestamp();
+ int64_t time_to_wait = next_checkpoint - rtc::TimeMillis();
if (time_to_wait > 0)
wake_up_->Wait(static_cast<unsigned long>(time_to_wait));
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
index 2855ed9d850..330aec946c9 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
@@ -33,7 +33,7 @@ class ProcessThreadImpl : public ProcessThread {
void Stop() override;
void WakeUp(Module* module) override;
- void PostTask(rtc::scoped_ptr<ProcessTask> task) override;
+ void PostTask(std::unique_ptr<ProcessTask> task) override;
void RegisterModule(Module* module) override;
void DeRegisterModule(Module* module) override;
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
index 9fa9edfa24a..5b31870ac41 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
@@ -13,9 +13,9 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/utility/source/process_thread_impl.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -51,7 +51,7 @@ ACTION_P(Increment, counter) {
}
ACTION_P(SetTimestamp, ptr) {
- *ptr = TickTime::MillisecondTimestamp();
+ *ptr = rtc::TimeMillis();
}
TEST(ProcessThreadImpl, StartStop) {
@@ -297,7 +297,7 @@ TEST(ProcessThreadImpl, PostTask) {
std::unique_ptr<EventWrapper> task_ran(EventWrapper::Create());
std::unique_ptr<RaiseEventTask> task(new RaiseEventTask(task_ran.get()));
thread.Start();
- thread.PostTask(rtc::UniqueToScoped(std::move(task)));
+ thread.PostTask(std::move(task));
EXPECT_EQ(kEventSignaled, task_ran->Wait(100));
thread.Stop();
}
diff --git a/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc b/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 7ab33ffeaba..839ab804ad9 100644
--- a/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -16,19 +16,19 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/video_frame.h"
using webrtc::CriticalSectionWrapper;
using webrtc::CriticalSectionScoped;
using webrtc::SleepMs;
-using webrtc::TickTime;
using webrtc::VideoCaptureAlarm;
using webrtc::VideoCaptureCapability;
using webrtc::VideoCaptureDataCallback;
@@ -40,8 +40,8 @@ using webrtc::VideoCaptureModule;
#define WAIT_(ex, timeout, res) \
do { \
res = (ex); \
- int64_t start = TickTime::MillisecondTimestamp(); \
- while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
+ int64_t start = rtc::TimeMillis(); \
+ while (!res && rtc::TimeMillis() < start + timeout) { \
SleepMs(5); \
res = (ex); \
} \
@@ -60,32 +60,6 @@ static const int kTestHeight = 288;
static const int kTestWidth = 352;
static const int kTestFramerate = 30;
-// Compares the content of two video frames.
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2) {
- bool result =
- (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) &&
- (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
- (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) &&
- (frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height());
-
- if (!result)
- return false;
- for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
- webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
- int allocated_size1 = frame1.allocated_size(plane_type);
- int allocated_size2 = frame2.allocated_size(plane_type);
- if (allocated_size1 != allocated_size2)
- return false;
- const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
- const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
- if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
- return false;
- }
- return true;
-}
-
class TestVideoCaptureCallback : public VideoCaptureDataCallback {
public:
TestVideoCaptureCallback()
@@ -118,8 +92,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
#endif
// RenderTimstamp should be the time now.
EXPECT_TRUE(
- videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
- videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
+ videoFrame.render_time_ms() >= rtc::TimeMillis()-30 &&
+ videoFrame.render_time_ms() <= rtc::TimeMillis());
if ((videoFrame.render_time_ms() >
last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
@@ -132,7 +106,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
incoming_frames_++;
last_render_time_ms_ = videoFrame.render_time_ms();
- last_frame_.CopyFrame(videoFrame);
+ last_frame_ = videoFrame.video_frame_buffer();
}
virtual void OnCaptureDelayChanged(const int32_t id,
@@ -168,7 +142,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
bool CompareLastFrame(const webrtc::VideoFrame& frame) {
CriticalSectionScoped cs(capture_cs_.get());
- return CompareFrames(last_frame_, frame);
+ return webrtc::test::FrameBufsEqual(last_frame_,
+ frame.video_frame_buffer());
}
void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) {
@@ -183,7 +158,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
int64_t last_render_time_ms_;
int incoming_frames_;
int timing_warnings_;
- webrtc::VideoFrame last_frame_;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> last_frame_;
webrtc::VideoRotation rotate_frame_;
};
@@ -277,7 +252,7 @@ class VideoCaptureTest : public testing::Test {
#endif
TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
for (int i = 0; i < 5; ++i) {
- int64_t start_time = TickTime::MillisecondTimestamp();
+ int64_t start_time = rtc::TimeMillis();
TestVideoCaptureCallback capture_observer;
rtc::scoped_refptr<VideoCaptureModule> module(
OpenVideoCaptureDevice(0, &capture_observer));
@@ -296,19 +271,19 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
// Less than 4s to start the camera.
- EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
+ EXPECT_LE(rtc::TimeMillis() - start_time, 4000);
// Make sure 5 frames are captured.
EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
EXPECT_GE(capture_observer.capture_delay(), 0);
- int64_t stop_time = TickTime::MillisecondTimestamp();
+ int64_t stop_time = rtc::TimeMillis();
EXPECT_EQ(0, module->StopCapture());
EXPECT_FALSE(module->CaptureStarted());
// Less than 3s to stop the camera.
- EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
+ EXPECT_LE(rtc::TimeMillis() - stop_time, 3000);
}
}
@@ -434,8 +409,7 @@ class VideoCaptureExternalTest : public testing::Test {
public:
void SetUp() {
capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
- process_module_ =
- rtc::ScopedToUnique(webrtc::ProcessThread::Create("ProcessThread"));
+ process_module_ = webrtc::ProcessThread::Create("ProcessThread");
process_module_->Start();
process_module_->RegisterModule(capture_module_);
@@ -449,10 +423,11 @@ class VideoCaptureExternalTest : public testing::Test {
test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
- memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight);
- memset(test_frame_.buffer(webrtc::kUPlane), 127,
+ memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
+ kTestWidth * kTestHeight);
+ memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
- memset(test_frame_.buffer(webrtc::kVPlane), 127,
+ memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
capture_module_->RegisterCaptureDataCallback(capture_callback_);
@@ -493,10 +468,10 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
#define MAYBE_FrameRate FrameRate
#endif
TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
- int64_t testTime = 3;
- TickTime startTime = TickTime::Now();
+ uint64_t testTime = 3 * rtc::kNumNanosecsPerSec;
+ uint64_t startTime = rtc::TimeNanos();
- while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ while ((rtc::TimeNanos() - startTime) < testTime) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
@@ -511,8 +486,8 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
SleepMs(500);
EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm());
- startTime = TickTime::Now();
- while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ startTime = rtc::TimeNanos();
+ while ((rtc::TimeNanos() - startTime) < testTime) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
index c80f2bf5b56..a2b2f58c9f2 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
@@ -172,6 +172,7 @@
'video_capture_module_internal_impl',
'webrtc_utility',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:video_test_common',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
index b1e697edc22..c3d5f370911 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
@@ -13,6 +13,7 @@
#include <stdlib.h>
#include "webrtc/base/refcount.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/include/module_common_types.h"
@@ -20,7 +21,6 @@
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace videocapturemodule {
@@ -85,7 +85,8 @@ int64_t VideoCaptureImpl::TimeUntilNextProcess()
CriticalSectionScoped cs(&_callBackCs);
const int64_t kProcessIntervalMs = 300;
return kProcessIntervalMs -
- (TickTime::Now() - _lastProcessTime).Milliseconds();
+ (rtc::TimeNanos() - _lastProcessTimeNanos) /
+ rtc::kNumNanosecsPerMillisec;
}
// Process any pending tasks such as timeouts
@@ -93,12 +94,12 @@ void VideoCaptureImpl::Process()
{
CriticalSectionScoped cs(&_callBackCs);
- const TickTime now = TickTime::Now();
- _lastProcessTime = TickTime::Now();
+ const int64_t now_ns = rtc::TimeNanos();
+ _lastProcessTimeNanos = rtc::TimeNanos();
// Handle No picture alarm
- if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() &&
+ if (_lastProcessFrameTimeNanos == _incomingFrameTimesNanos[0] &&
_captureAlarm != Raised)
{
if (_noPictureAlarmCallBack && _captureCallBack)
@@ -107,7 +108,7 @@ void VideoCaptureImpl::Process()
_captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
}
}
- else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() &&
+ else if (_lastProcessFrameTimeNanos != _incomingFrameTimesNanos[0] &&
_captureAlarm != Cleared)
{
if (_noPictureAlarmCallBack && _captureCallBack)
@@ -119,19 +120,21 @@ void VideoCaptureImpl::Process()
}
// Handle frame rate callback
- if ((now - _lastFrameRateCallbackTime).Milliseconds()
+ if ((now_ns - _lastFrameRateCallbackTimeNanos) /
+ rtc::kNumNanosecsPerMillisec
> kFrameRateCallbackInterval)
{
if (_frameRateCallBack && _captureCallBack)
{
- const uint32_t frameRate = CalculateFrameRate(now);
+ const uint32_t frameRate = CalculateFrameRate(now_ns);
_captureCallBack->OnCaptureFrameRate(_id, frameRate);
}
- _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback
+ // Can be set by EnableFrameRateCallback
+ _lastFrameRateCallbackTimeNanos = now_ns;
}
- _lastProcessFrameCount = _incomingFrameTimes[0];
+ _lastProcessFrameTimeNanos = _incomingFrameTimesNanos[0];
}
VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
@@ -141,15 +144,15 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
_captureDelay(0),
_requestedCapability(),
_callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
- _lastProcessTime(TickTime::Now()),
- _lastFrameRateCallbackTime(TickTime::Now()),
+ _lastProcessTimeNanos(rtc::TimeNanos()),
+ _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()),
_frameRateCallBack(false),
_noPictureAlarmCallBack(false),
_captureAlarm(Cleared),
_setCaptureDelay(0),
_dataCallBack(NULL),
_captureCallBack(NULL),
- _lastProcessFrameCount(TickTime::Now()),
+ _lastProcessFrameTimeNanos(rtc::TimeNanos()),
_rotateFrame(kVideoRotation_0),
apply_rotation_(false) {
_requestedCapability.width = kDefaultWidth;
@@ -157,7 +160,7 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
_requestedCapability.maxFPS = 30;
_requestedCapability.rawType = kVideoI420;
_requestedCapability.codecType = kVideoCodecUnknown;
- memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+ memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
}
VideoCaptureImpl::~VideoCaptureImpl()
@@ -295,7 +298,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
_captureFrame.set_rotation(kVideoRotation_0);
}
_captureFrame.set_ntp_time_ms(captureTime);
- _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
+ _captureFrame.set_render_time_ms(rtc::TimeMillis());
DeliverCapturedFrame(_captureFrame);
}
@@ -321,7 +324,7 @@ void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
_frameRateCallBack = enable;
if (enable)
{
- _lastFrameRateCallbackTime = TickTime::Now();
+ _lastFrameRateCallbackTimeNanos = rtc::TimeNanos();
}
}
@@ -341,7 +344,7 @@ void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
void VideoCaptureImpl::UpdateFrameCount()
{
- if (_incomingFrameTimes[0].MicrosecondTimestamp() == 0)
+ if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
{
// first no shift
}
@@ -350,20 +353,22 @@ void VideoCaptureImpl::UpdateFrameCount()
// shift
for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
{
- _incomingFrameTimes[i + 1] = _incomingFrameTimes[i];
+ _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
}
}
- _incomingFrameTimes[0] = TickTime::Now();
+ _incomingFrameTimesNanos[0] = rtc::TimeNanos();
}
-uint32_t VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
{
int32_t num = 0;
int32_t nrOfFrames = 0;
for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
{
- if (_incomingFrameTimes[num].Ticks() <= 0
- || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
+ if (_incomingFrameTimesNanos[num] <= 0 ||
+ (now_ns - _incomingFrameTimesNanos[num]) /
+ rtc::kNumNanosecsPerMillisec >
+ kFrameRateHistoryWindowMs) // don't use data older than 2sec
{
break;
}
@@ -374,7 +379,8 @@ uint32_t VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
}
if (num > 1)
{
- int64_t diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
+ int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+ rtc::kNumNanosecsPerMillisec;
if (diff > 0)
{
return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
index 9c2cad7c95c..7d785c3a908 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
@@ -20,7 +20,6 @@
#include "webrtc/common_video/rotation.h"
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video_frame.h"
namespace webrtc
@@ -116,12 +115,14 @@ protected:
VideoCaptureCapability _requestedCapability; // Should be set by platform dependent code in StartCapture.
private:
void UpdateFrameCount();
- uint32_t CalculateFrameRate(const TickTime& now);
+ uint32_t CalculateFrameRate(int64_t now_ns);
CriticalSectionWrapper& _callBackCs;
- TickTime _lastProcessTime; // last time the module process function was called.
- TickTime _lastFrameRateCallbackTime; // last time the frame rate callback function was called.
+ // last time the module process function was called.
+ int64_t _lastProcessTimeNanos;
+ // last time the frame rate callback function was called.
+ int64_t _lastFrameRateCallbackTimeNanos;
bool _frameRateCallBack; // true if EnableFrameRateCallback
bool _noPictureAlarmCallBack; //true if EnableNoPictureAlarm
VideoCaptureAlarm _captureAlarm; // current value of the noPictureAlarm
@@ -130,8 +131,9 @@ private:
VideoCaptureDataCallback* _dataCallBack;
VideoCaptureFeedBack* _captureCallBack;
- TickTime _lastProcessFrameCount;
- TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
+ int64_t _lastProcessFrameTimeNanos;
+ // timestamp for local captured frames
+ int64_t _incomingFrameTimesNanos[kFrameRateCountHistorySize];
VideoRotation _rotateFrame; // Set if the frame should be rotated by the
// capture module.
diff --git a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
index bc6f595b51d..755e6efa9da 100644
--- a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
@@ -10,14 +10,10 @@ import("../../build/webrtc.gni")
source_set("video_coding") {
sources = [
- "bitrate_adjuster.cc",
- "bitrate_adjuster.h",
"codec_database.cc",
"codec_database.h",
"codec_timer.cc",
"codec_timer.h",
- "content_metrics_processing.cc",
- "content_metrics_processing.h",
"decoding_state.cc",
"decoding_state.h",
"encoded_frame.cc",
@@ -25,6 +21,8 @@ source_set("video_coding") {
"fec_tables_xor.h",
"frame_buffer.cc",
"frame_buffer.h",
+ "frame_buffer2.cc",
+ "frame_buffer2.h",
"frame_object.cc",
"frame_object.h",
"generic_decoder.cc",
@@ -56,11 +54,10 @@ source_set("video_coding") {
"packet_buffer.h",
"percentile_filter.cc",
"percentile_filter.h",
- "qm_select.cc",
- "qm_select.h",
- "qm_select_data.h",
"receiver.cc",
"receiver.h",
+ "rtp_frame_reference_finder.cc",
+ "rtp_frame_reference_finder.h",
"rtt_filter.cc",
"rtt_filter.h",
"session_info.cc",
@@ -107,6 +104,8 @@ source_set("video_coding_utility") {
sources = [
"utility/frame_dropper.cc",
"utility/frame_dropper.h",
+ "utility/ivf_file_writer.cc",
+ "utility/ivf_file_writer.h",
"utility/moving_average.h",
"utility/qp_parser.cc",
"utility/qp_parser.h",
diff --git a/chromium/third_party/webrtc/modules/video_coding/OWNERS b/chromium/third_party/webrtc/modules/video_coding/OWNERS
index 389d632dfdf..acf09399ac7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_coding/OWNERS
@@ -1,5 +1,6 @@
-stefan@webrtc.org
marpan@webrtc.org
+pbos@webrtc.org
+stefan@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
diff --git a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc b/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc
deleted file mode 100644
index b6828ee6e1e..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
-
-#include <cmath>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-// Update bitrate at most once every second.
-const uint32_t BitrateAdjuster::kBitrateUpdateIntervalMs = 1000;
-
-// Update bitrate at most once every 30 frames.
-const uint32_t BitrateAdjuster::kBitrateUpdateFrameInterval = 30;
-
-// 10 percent of original.
-const float BitrateAdjuster::kBitrateTolerancePct = .1f;
-
-const float BitrateAdjuster::kBytesPerMsToBitsPerSecond = 8 * 1000;
-
-BitrateAdjuster::BitrateAdjuster(Clock* clock,
- float min_adjusted_bitrate_pct,
- float max_adjusted_bitrate_pct)
- : clock_(clock),
- min_adjusted_bitrate_pct_(min_adjusted_bitrate_pct),
- max_adjusted_bitrate_pct_(max_adjusted_bitrate_pct),
- bitrate_tracker_(1.5 * kBitrateUpdateIntervalMs,
- kBytesPerMsToBitsPerSecond) {
- Reset();
-}
-
-void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) {
- rtc::CritScope cs(&crit_);
- // If the change in target bitrate is large, update the adjusted bitrate
- // immediately since it's likely we have gained or lost a sizeable amount of
- // bandwidth and we'll want to respond quickly.
- // If the change in target bitrate fits within the existing tolerance of
- // encoder output, wait for the next adjustment time to preserve
- // existing penalties and not forcibly reset the adjusted bitrate to target.
- // However, if we received many small deltas within an update time
- // window and one of them exceeds the tolerance when compared to the last
- // target we updated against, treat it as a large change in target bitrate.
- if (!IsWithinTolerance(bitrate_bps, target_bitrate_bps_) ||
- !IsWithinTolerance(bitrate_bps, last_adjusted_target_bitrate_bps_)) {
- adjusted_bitrate_bps_ = bitrate_bps;
- last_adjusted_target_bitrate_bps_ = bitrate_bps;
- }
- target_bitrate_bps_ = bitrate_bps;
-}
-
-uint32_t BitrateAdjuster::GetTargetBitrateBps() const {
- rtc::CritScope cs(&crit_);
- return target_bitrate_bps_;
-}
-
-uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const {
- rtc::CritScope cs(&crit_);
- return adjusted_bitrate_bps_;
-}
-
-uint32_t BitrateAdjuster::GetEstimatedBitrateBps() {
- rtc::CritScope cs(&crit_);
- return bitrate_tracker_.Rate(clock_->TimeInMilliseconds());
-}
-
-void BitrateAdjuster::Update(size_t frame_size) {
- rtc::CritScope cs(&crit_);
- uint32_t current_time_ms = clock_->TimeInMilliseconds();
- bitrate_tracker_.Update(frame_size, current_time_ms);
- UpdateBitrate(current_time_ms);
-}
-
-bool BitrateAdjuster::IsWithinTolerance(uint32_t bitrate_bps,
- uint32_t target_bitrate_bps) {
- if (target_bitrate_bps == 0) {
- return false;
- }
- float delta = std::abs(static_cast<float>(bitrate_bps) -
- static_cast<float>(target_bitrate_bps));
- float delta_pct = delta / target_bitrate_bps;
- return delta_pct < kBitrateTolerancePct;
-}
-
-uint32_t BitrateAdjuster::GetMinAdjustedBitrateBps() const {
- return min_adjusted_bitrate_pct_ * target_bitrate_bps_;
-}
-
-uint32_t BitrateAdjuster::GetMaxAdjustedBitrateBps() const {
- return max_adjusted_bitrate_pct_ * target_bitrate_bps_;
-}
-
-// Only safe to call this after Update calls have stopped
-void BitrateAdjuster::Reset() {
- rtc::CritScope cs(&crit_);
- target_bitrate_bps_ = 0;
- adjusted_bitrate_bps_ = 0;
- last_adjusted_target_bitrate_bps_ = 0;
- last_bitrate_update_time_ms_ = 0;
- frames_since_last_update_ = 0;
- bitrate_tracker_.Reset();
-}
-
-void BitrateAdjuster::UpdateBitrate(uint32_t current_time_ms) {
- uint32_t time_since_last_update_ms =
- current_time_ms - last_bitrate_update_time_ms_;
- // Don't attempt to update bitrate unless enough time and frames have passed.
- ++frames_since_last_update_;
- if (time_since_last_update_ms < kBitrateUpdateIntervalMs ||
- frames_since_last_update_ < kBitrateUpdateFrameInterval) {
- return;
- }
- float estimated_bitrate_bps = bitrate_tracker_.Rate(current_time_ms);
- float target_bitrate_bps = target_bitrate_bps_;
- float error = target_bitrate_bps - estimated_bitrate_bps;
-
- // Adjust if we've overshot by any amount or if we've undershot too much.
- if (estimated_bitrate_bps > target_bitrate_bps ||
- error > kBitrateTolerancePct * target_bitrate_bps) {
- // Adjust the bitrate by a fraction of the error.
- float adjustment = .5 * error;
- float adjusted_bitrate_bps = target_bitrate_bps + adjustment;
-
- // Clamp the adjustment.
- float min_bitrate_bps = GetMinAdjustedBitrateBps();
- float max_bitrate_bps = GetMaxAdjustedBitrateBps();
- adjusted_bitrate_bps = std::max(adjusted_bitrate_bps, min_bitrate_bps);
- adjusted_bitrate_bps = std::min(adjusted_bitrate_bps, max_bitrate_bps);
-
- // Set the adjustment if it's not already set.
- float last_adjusted_bitrate_bps = adjusted_bitrate_bps_;
- if (adjusted_bitrate_bps != last_adjusted_bitrate_bps) {
- LOG(LS_VERBOSE) << "Adjusting encoder bitrate:"
- << "\n target_bitrate:"
- << static_cast<uint32_t>(target_bitrate_bps)
- << "\n estimated_bitrate:"
- << static_cast<uint32_t>(estimated_bitrate_bps)
- << "\n last_adjusted_bitrate:"
- << static_cast<uint32_t>(last_adjusted_bitrate_bps)
- << "\n adjusted_bitrate:"
- << static_cast<uint32_t>(adjusted_bitrate_bps);
- adjusted_bitrate_bps_ = adjusted_bitrate_bps;
- }
- }
- last_bitrate_update_time_ms_ = current_time_ms;
- frames_since_last_update_ = 0;
- last_adjusted_target_bitrate_bps_ = target_bitrate_bps_;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc
deleted file mode 100644
index 1d14ee31606..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
-#include "webrtc/system_wrappers/include/clock.h"
-
-namespace webrtc {
-
-class BitrateAdjusterTest : public ::testing::Test {
- public:
- BitrateAdjusterTest()
- : clock_(0),
- adjuster_(&clock_, kMinAdjustedBitratePct, kMaxAdjustedBitratePct) {}
-
- // Simulate an output bitrate for one update cycle of BitrateAdjuster.
- void SimulateBitrateBps(uint32_t bitrate_bps) {
- const uint32_t update_interval_ms =
- BitrateAdjuster::kBitrateUpdateIntervalMs;
- const uint32_t update_frame_interval =
- BitrateAdjuster::kBitrateUpdateFrameInterval;
- // Round up frame interval so we get one cycle passes.
- const uint32_t frame_interval_ms =
- (update_interval_ms + update_frame_interval - 1) /
- update_frame_interval;
- const size_t frame_size_bytes =
- (bitrate_bps * frame_interval_ms) / (8 * 1000);
- for (size_t i = 0; i < update_frame_interval; ++i) {
- clock_.AdvanceTimeMilliseconds(frame_interval_ms);
- adjuster_.Update(frame_size_bytes);
- }
- }
-
- uint32_t GetTargetBitrateBpsPct(float pct) {
- return pct * adjuster_.GetTargetBitrateBps();
- }
-
- void VerifyAdjustment() {
- // The adjusted bitrate should be between the estimated bitrate and the
- // target bitrate within clamp.
- uint32_t target_bitrate_bps = adjuster_.GetTargetBitrateBps();
- uint32_t adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
- uint32_t estimated_bitrate_bps = adjuster_.GetEstimatedBitrateBps();
- uint32_t adjusted_lower_bound_bps =
- GetTargetBitrateBpsPct(kMinAdjustedBitratePct);
- uint32_t adjusted_upper_bound_bps =
- GetTargetBitrateBpsPct(kMaxAdjustedBitratePct);
- EXPECT_LE(adjusted_bitrate_bps, adjusted_upper_bound_bps);
- EXPECT_GE(adjusted_bitrate_bps, adjusted_lower_bound_bps);
- if (estimated_bitrate_bps > target_bitrate_bps) {
- EXPECT_LT(adjusted_bitrate_bps, target_bitrate_bps);
- }
- }
-
- protected:
- static const float kMinAdjustedBitratePct;
- static const float kMaxAdjustedBitratePct;
- SimulatedClock clock_;
- BitrateAdjuster adjuster_;
-};
-
-const float BitrateAdjusterTest::kMinAdjustedBitratePct = .5f;
-const float BitrateAdjusterTest::kMaxAdjustedBitratePct = .95f;
-
-TEST_F(BitrateAdjusterTest, VaryingBitrates) {
- const uint32_t target_bitrate_bps = 640000;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
-
- // Grossly overshoot for a little while. Adjusted bitrate should decrease.
- uint32_t actual_bitrate_bps = 2 * target_bitrate_bps;
- uint32_t last_adjusted_bitrate_bps = 0;
- uint32_t adjusted_bitrate_bps = 0;
-
- SimulateBitrateBps(actual_bitrate_bps);
- VerifyAdjustment();
- last_adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
-
- SimulateBitrateBps(actual_bitrate_bps);
- VerifyAdjustment();
- adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
- EXPECT_LT(adjusted_bitrate_bps, last_adjusted_bitrate_bps);
- last_adjusted_bitrate_bps = adjusted_bitrate_bps;
- // After two cycles we should've stabilized and hit the lower bound.
- EXPECT_EQ(GetTargetBitrateBpsPct(kMinAdjustedBitratePct),
- adjusted_bitrate_bps);
-
- // Simulate encoder settling down. Adjusted bitrate should increase.
- SimulateBitrateBps(target_bitrate_bps);
- adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
- VerifyAdjustment();
- EXPECT_GT(adjusted_bitrate_bps, last_adjusted_bitrate_bps);
- last_adjusted_bitrate_bps = adjusted_bitrate_bps;
-
- SimulateBitrateBps(target_bitrate_bps);
- adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
- VerifyAdjustment();
- EXPECT_GT(adjusted_bitrate_bps, last_adjusted_bitrate_bps);
- last_adjusted_bitrate_bps = adjusted_bitrate_bps;
- // After two cycles we should've stabilized and hit the upper bound.
- EXPECT_EQ(GetTargetBitrateBpsPct(kMaxAdjustedBitratePct),
- adjusted_bitrate_bps);
-}
-
-// Tests that large changes in target bitrate will result in immediate change
-// in adjusted bitrate.
-TEST_F(BitrateAdjusterTest, LargeTargetDelta) {
- uint32_t target_bitrate_bps = 640000;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- float delta_pct = BitrateAdjuster::kBitrateTolerancePct * 2;
-
- target_bitrate_bps = (1 + delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- target_bitrate_bps = (1 - delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-}
-
-// Tests that small changes in target bitrate within tolerance will not affect
-// adjusted bitrate immediately.
-TEST_F(BitrateAdjusterTest, SmallTargetDelta) {
- const uint32_t initial_target_bitrate_bps = 640000;
- uint32_t target_bitrate_bps = initial_target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(initial_target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- float delta_pct = BitrateAdjuster::kBitrateTolerancePct / 2;
-
- target_bitrate_bps = (1 + delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(initial_target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- target_bitrate_bps = (1 - delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(initial_target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-}
-
-TEST_F(BitrateAdjusterTest, SmallTargetDeltaOverflow) {
- const uint32_t initial_target_bitrate_bps = 640000;
- uint32_t target_bitrate_bps = initial_target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(initial_target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- float delta_pct = BitrateAdjuster::kBitrateTolerancePct / 2;
-
- target_bitrate_bps = (1 + delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(initial_target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-
- // 1.05 * 1.05 is 1.1 which is greater than tolerance for the initial target
- // bitrate. Since we didn't advance the clock the adjuster never updated.
- target_bitrate_bps = (1 + delta_pct) * target_bitrate_bps;
- adjuster_.SetTargetBitrateBps(target_bitrate_bps);
- EXPECT_EQ(target_bitrate_bps, adjuster_.GetAdjustedBitrateBps());
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codec_database.cc b/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
index a5a7c1ea999..1baa414bce7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
@@ -238,7 +238,6 @@ bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
memcpy(&send_codec_, &new_send_codec, sizeof(send_codec_));
if (!reset_required) {
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
return true;
}
@@ -249,7 +248,6 @@ bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
ptr_encoder_.reset(
new VCMGenericEncoder(external_encoder_, encoder_rate_observer_,
encoded_frame_callback_, internal_source_));
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
encoded_frame_callback_->SetInternalSource(internal_source_);
if (ptr_encoder_->InitEncode(&send_codec_, number_of_cores_,
max_payload_size_) < 0) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
index 9cb58a85986..92489c39665 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
@@ -64,7 +64,7 @@
'type': 'static_library',
'includes': [ '../../../../build/objc_common.gypi' ],
'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
],
'link_settings': {
'xcode_settings': {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index e98666d0736..f560a37d0ec 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -129,10 +129,12 @@ int H264DecoderImpl::AVGetBuffer2(
video_frame->set_video_frame_buffer(
decoder->pool_.CreateBuffer(width, height));
// DCHECK that we have a continuous buffer as is required.
- RTC_DCHECK_EQ(video_frame->buffer(kUPlane),
- video_frame->buffer(kYPlane) + video_frame->allocated_size(kYPlane));
- RTC_DCHECK_EQ(video_frame->buffer(kVPlane),
- video_frame->buffer(kUPlane) + video_frame->allocated_size(kUPlane));
+ RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
+ video_frame->video_frame_buffer()->DataY() +
+ video_frame->allocated_size(kYPlane));
+ RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
+ video_frame->video_frame_buffer()->DataU() +
+ video_frame->allocated_size(kUPlane));
int total_size = video_frame->allocated_size(kYPlane) +
video_frame->allocated_size(kUPlane) +
video_frame->allocated_size(kVPlane);
@@ -141,12 +143,18 @@ int H264DecoderImpl::AVGetBuffer2(
av_frame->reordered_opaque = context->reordered_opaque;
// Set |av_frame| members as required by FFmpeg.
- av_frame->data[kYPlaneIndex] = video_frame->buffer(kYPlane);
- av_frame->linesize[kYPlaneIndex] = video_frame->stride(kYPlane);
- av_frame->data[kUPlaneIndex] = video_frame->buffer(kUPlane);
- av_frame->linesize[kUPlaneIndex] = video_frame->stride(kUPlane);
- av_frame->data[kVPlaneIndex] = video_frame->buffer(kVPlane);
- av_frame->linesize[kVPlaneIndex] = video_frame->stride(kVPlane);
+ av_frame->data[kYPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataY();
+ av_frame->linesize[kYPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideY();
+ av_frame->data[kUPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataU();
+ av_frame->linesize[kUPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideU();
+ av_frame->data[kVPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataV();
+ av_frame->linesize[kVPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideV();
RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
@@ -339,9 +347,12 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
VideoFrame* video_frame = static_cast<VideoFrame*>(
av_buffer_get_opaque(av_frame_->buf[0]));
RTC_DCHECK(video_frame);
- RTC_CHECK_EQ(av_frame_->data[kYPlane], video_frame->buffer(kYPlane));
- RTC_CHECK_EQ(av_frame_->data[kUPlane], video_frame->buffer(kUPlane));
- RTC_CHECK_EQ(av_frame_->data[kVPlane], video_frame->buffer(kVPlane));
+ RTC_CHECK_EQ(av_frame_->data[kYPlane],
+ video_frame->video_frame_buffer()->DataY());
+ RTC_CHECK_EQ(av_frame_->data[kUPlane],
+ video_frame->video_frame_buffer()->DataU());
+ RTC_CHECK_EQ(av_frame_->data[kVPlane],
+ video_frame->video_frame_buffer()->DataV());
video_frame->set_timestamp(input_image._timeStamp);
// The decoded image may be larger than what is supposed to be visible, see
@@ -352,9 +363,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
video_frame->set_video_frame_buffer(
new rtc::RefCountedObject<WrappedI420Buffer>(
av_frame_->width, av_frame_->height,
- buf->data(kYPlane), buf->stride(kYPlane),
- buf->data(kUPlane), buf->stride(kUPlane),
- buf->data(kVPlane), buf->stride(kVPlane),
+ buf->DataY(), buf->StrideY(),
+ buf->DataU(), buf->StrideU(),
+ buf->DataV(), buf->StrideV(),
rtc::KeepRefUntilDone(buf)));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 0e065c5e497..4d85858a162 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -50,12 +50,8 @@ int NumberOfThreads(int width, int height, int number_of_cores) {
return 1;
}
-} // namespace
-
-static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) {
+FrameType ConvertToVideoFrameType(EVideoFrameType type) {
switch (type) {
- case videoFrameTypeInvalid:
- return kEmptyFrame;
case videoFrameTypeIDR:
return kVideoFrameKey;
case videoFrameTypeSkip:
@@ -63,12 +59,15 @@ static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) {
case videoFrameTypeP:
case videoFrameTypeIPMixed:
return kVideoFrameDelta;
- default:
- LOG(LS_WARNING) << "Unknown EVideoFrameType: " << type;
- return kVideoFrameDelta;
+ case videoFrameTypeInvalid:
+ break;
}
+ RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type;
+ return kEmptyFrame;
}
+} // namespace
+
// Helper method used by H264EncoderImpl::Encode.
// Copies the encoded bytes from |info| to |encoded_image| and updates the
// fragmentation information of |frag_header|. The |encoded_image->_buffer| may
@@ -368,12 +367,12 @@ int32_t H264EncoderImpl::Encode(
picture.iPicHeight = frame.height();
picture.iColorFormat = EVideoFormatType::videoFormatI420;
picture.uiTimeStamp = frame.ntp_time_ms();
- picture.iStride[0] = frame.stride(kYPlane);
- picture.iStride[1] = frame.stride(kUPlane);
- picture.iStride[2] = frame.stride(kVPlane);
- picture.pData[0] = const_cast<uint8_t*>(frame.buffer(kYPlane));
- picture.pData[1] = const_cast<uint8_t*>(frame.buffer(kUPlane));
- picture.pData[2] = const_cast<uint8_t*>(frame.buffer(kVPlane));
+ picture.iStride[0] = frame.video_frame_buffer()->StrideY();
+ picture.iStride[1] = frame.video_frame_buffer()->StrideU();
+ picture.iStride[2] = frame.video_frame_buffer()->StrideV();
+ picture.pData[0] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataY());
+ picture.pData[1] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataU());
+ picture.pData[2] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataV());
// EncodeFrame output.
SFrameBSInfo info;
@@ -393,7 +392,8 @@ int32_t H264EncoderImpl::Encode(
encoded_image_._timeStamp = frame.timestamp();
encoded_image_.ntp_time_ms_ = frame.ntp_time_ms();
encoded_image_.capture_time_ms_ = frame.render_time_ms();
- encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType);
+ encoded_image_.rotation_ = frame.rotation();
+ encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates |encoded_image_|.
RTPFragmentationHeader frag_header;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
index 0ea2600197e..18820d3ded7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
@@ -15,12 +15,12 @@
#include <memory>
+#if defined(WEBRTC_IOS)
+#include "RTCUIApplication.h"
+#endif
#include "libyuv/convert.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#if defined(WEBRTC_IOS)
-#include "webrtc/base/objc/RTCUIApplication.h"
-#endif
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/video_frame.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
index 8cfe63dbb78..5f6a231288d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
@@ -17,12 +17,12 @@
#include <string>
#include <vector>
+#if defined(WEBRTC_IOS)
+#include "RTCUIApplication.h"
+#endif
#include "libyuv/convert_from.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#if defined(WEBRTC_IOS)
-#include "webrtc/base/objc/RTCUIApplication.h"
-#endif
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -118,8 +118,14 @@ struct FrameEncodeParams {
int32_t w,
int32_t h,
int64_t rtms,
- uint32_t ts)
- : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
+ uint32_t ts,
+ webrtc::VideoRotation r)
+ : encoder(e),
+ width(w),
+ height(h),
+ render_time_ms(rtms),
+ timestamp(ts),
+ rotation(r) {
if (csi) {
codec_specific_info = *csi;
} else {
@@ -133,6 +139,7 @@ struct FrameEncodeParams {
int32_t height;
int64_t render_time_ms;
uint32_t timestamp;
+ webrtc::VideoRotation rotation;
};
// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
@@ -161,10 +168,14 @@ bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame,
int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
// Convert I420 to NV12.
int ret = libyuv::I420ToNV12(
- frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
- frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
- frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), dst_y,
- dst_stride_y, dst_uv, dst_stride_uv, frame.width(), frame.height());
+ frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->StrideY(),
+ frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->StrideU(),
+ frame.video_frame_buffer()->DataV(),
+ frame.video_frame_buffer()->StrideV(),
+ dst_y, dst_stride_y, dst_uv, dst_stride_uv,
+ frame.width(), frame.height());
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
if (ret) {
LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
@@ -185,7 +196,8 @@ void VTCompressionOutputCallback(void* encoder,
encode_params->encoder->OnEncodedFrame(
status, info_flags, sample_buffer, encode_params->codec_specific_info,
encode_params->width, encode_params->height,
- encode_params->render_time_ms, encode_params->timestamp);
+ encode_params->render_time_ms, encode_params->timestamp,
+ encode_params->rotation);
}
} // namespace internal
@@ -248,6 +260,7 @@ int H264VideoToolboxEncoder::Encode(
return WEBRTC_VIDEO_CODEC_OK;
}
#endif
+ bool is_keyframe_required = false;
// Get a pixel buffer from the pool and copy frame data over.
CVPixelBufferPoolRef pixel_buffer_pool =
VTCompressionSessionGetPixelBufferPool(compression_session_);
@@ -257,9 +270,11 @@ int H264VideoToolboxEncoder::Encode(
// invalidated, which causes this pool call to fail when the application
// is foregrounded and frames are being sent for encoding again.
// Resetting the session when this happens fixes the issue.
+ // In addition we request a keyframe so video can recover quickly.
ResetCompressionSession();
pixel_buffer_pool =
VTCompressionSessionGetPixelBufferPool(compression_session_);
+ is_keyframe_required = true;
}
#endif
if (!pixel_buffer_pool) {
@@ -283,8 +298,7 @@ int H264VideoToolboxEncoder::Encode(
}
// Check if we need a keyframe.
- bool is_keyframe_required = false;
- if (frame_types) {
+ if (!is_keyframe_required && frame_types) {
for (auto frame_type : *frame_types) {
if (frame_type == kVideoFrameKey) {
is_keyframe_required = true;
@@ -304,7 +318,7 @@ int H264VideoToolboxEncoder::Encode(
std::unique_ptr<internal::FrameEncodeParams> encode_params;
encode_params.reset(new internal::FrameEncodeParams(
this, codec_specific_info, width_, height_, input_image.render_time_ms(),
- input_image.timestamp()));
+ input_image.timestamp(), input_image.rotation()));
// Update the bitrate if needed.
SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps());
@@ -469,7 +483,8 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
int32_t width,
int32_t height,
int64_t render_time_ms,
- uint32_t timestamp) {
+ uint32_t timestamp,
+ VideoRotation rotation) {
if (status != noErr) {
LOG(LS_ERROR) << "H264 encode failed.";
return;
@@ -509,6 +524,7 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
is_keyframe ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
frame.capture_time_ms_ = render_time_ms;
frame._timeStamp = timestamp;
+ frame.rotation_ = rotation;
int result = callback_->Encoded(frame, &codec_specific_info, header.get());
if (result != 0) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
index 779889d43cc..d54fa612c3d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
@@ -12,8 +12,9 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
+#include "webrtc/common_video/include/bitrate_adjuster.h"
+#include "webrtc/common_video/rotation.h"
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
#if defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
@@ -58,7 +59,8 @@ class H264VideoToolboxEncoder : public H264Encoder {
int32_t width,
int32_t height,
int64_t render_time_ms,
- uint32_t timestamp);
+ uint32_t timestamp,
+ VideoRotation rotation);
private:
int ResetCompressionSession();
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index e64babd599f..9a9a0ddf165 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -17,6 +17,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/timeutils.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
namespace webrtc {
@@ -198,7 +199,7 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
- encode_start_ = TickTime::Now();
+ encode_start_ns_ = rtc::TimeNanos();
// Use the frame number as "timestamp" to identify frames
source_frame_.set_timestamp(frame_number);
@@ -248,11 +249,11 @@ void VideoProcessorImpl::FrameEncoded(
encoded_frame_type_ = encoded_image._frameType;
- TickTime encode_stop = TickTime::Now();
+ int64_t encode_stop_ns = rtc::TimeNanos();
int frame_number = encoded_image._timeStamp;
FrameStatistic& stat = stats_->stats_[frame_number];
stat.encode_time_in_us =
- GetElapsedTimeMicroseconds(encode_start_, encode_stop);
+ GetElapsedTimeMicroseconds(encode_start_ns_, encode_stop_ns);
stat.encoding_successful = true;
stat.encoded_frame_length_in_bytes = encoded_image._length;
stat.frame_number = encoded_image._timeStamp;
@@ -299,7 +300,7 @@ void VideoProcessorImpl::FrameEncoded(
// Keep track of if frames are lost due to packet loss so we can tell
// this to the encoder (this is handled by the RTP logic in the full stack)
- decode_start_ = TickTime::Now();
+ decode_start_ns_ = rtc::TimeNanos();
// TODO(kjellander): Pass fragmentation header to the decoder when
// CL 172001 has been submitted and PacketManipulator supports this.
int32_t decode_result =
@@ -315,12 +316,12 @@ void VideoProcessorImpl::FrameEncoded(
}
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
- TickTime decode_stop = TickTime::Now();
+ int64_t decode_stop_ns = rtc::TimeNanos();
int frame_number = image.timestamp();
// Report stats
FrameStatistic& stat = stats_->stats_[frame_number];
stat.decode_time_in_us =
- GetElapsedTimeMicroseconds(decode_start_, decode_stop);
+ GetElapsedTimeMicroseconds(decode_start_ns_, decode_stop_ns);
stat.decoding_successful = true;
// Check for resize action (either down or up):
@@ -378,10 +379,9 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
}
}
-int VideoProcessorImpl::GetElapsedTimeMicroseconds(
- const webrtc::TickTime& start,
- const webrtc::TickTime& stop) {
- uint64_t encode_time = (stop - start).Microseconds();
+int VideoProcessorImpl::GetElapsedTimeMicroseconds(int64_t start,
+ int64_t stop) {
+ uint64_t encode_time = (stop - start) / rtc::kNumNanosecsPerMicrosec;
assert(encode_time <
static_cast<unsigned int>(std::numeric_limits<int>::max()));
return static_cast<int>(encode_time);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index cd1c7b9d62c..f0322dd67b4 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -19,7 +19,6 @@
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/stats.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
#include "webrtc/video_frame.h"
@@ -179,8 +178,7 @@ class VideoProcessorImpl : public VideoProcessor {
void FrameDecoded(const webrtc::VideoFrame& image);
// Used for getting a 32-bit integer representing time
// (checks the size is within signed 32-bit bounds before casting it)
- int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
- const webrtc::TickTime& stop);
+ int GetElapsedTimeMicroseconds(int64_t start, int64_t stop);
// Updates the encoder with the target bit rate and the frame rate.
void SetRates(int bit_rate, int frame_rate) override;
// Return the size of the encoded frame in bytes.
@@ -225,8 +223,8 @@ class VideoProcessorImpl : public VideoProcessor {
// Statistics
double bit_rate_factor_; // multiply frame length with this to get bit rate
- webrtc::TickTime encode_start_;
- webrtc::TickTime decode_start_;
+ int64_t encode_start_ns_;
+ int64_t decode_start_ns_;
// Callback class required to implement according to the VideoEncoder API.
class VideoProcessorEncodeCompleteCallback
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 897870a2717..9f361dc6261 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -626,6 +626,7 @@ TEST_F(VideoProcessorIntegrationTest, Process0PercentPacketLossH264) {
// Fails on iOS. See webrtc:4755.
#if !defined(WEBRTC_IOS)
+#if !defined(RTC_DISABLE_VP9)
// VP9: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
@@ -780,6 +781,8 @@ TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDropVP9) {
// TODO(marpan): Add temporal layer test for VP9, once changes are in
// vp9 wrapper for this.
+#endif // !defined(RTC_DISABLE_VP9)
+
// VP8: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
@@ -961,7 +964,7 @@ TEST_F(VideoProcessorIntegrationTest,
SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
- SetRateControlMetrics(rc_metrics, 0, 160, 60, 120, 20, 70, 1, 2);
+ SetRateControlMetrics(rc_metrics, 0, 160, 80, 120, 20, 70, 1, 2);
ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
index d22601358f2..b9721cde1bc 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
@@ -101,10 +101,10 @@ class RealTimeTemporalLayers : public TemporalLayers {
virtual ~RealTimeTemporalLayers() {}
- virtual bool ConfigureBitrates(int bitrate_kbit,
- int max_bitrate_kbit,
- int framerate,
- vpx_codec_enc_cfg_t* cfg) {
+ bool ConfigureBitrates(int bitrate_kbit,
+ int max_bitrate_kbit,
+ int framerate,
+ vpx_codec_enc_cfg_t* cfg) override {
temporal_layers_ =
CalculateNumberOfTemporalLayers(temporal_layers_, framerate);
temporal_layers_ = std::min(temporal_layers_, max_temporal_layers_);
@@ -184,7 +184,7 @@ class RealTimeTemporalLayers : public TemporalLayers {
return true;
}
- virtual int EncodeFlags(uint32_t timestamp) {
+ int EncodeFlags(uint32_t timestamp) override {
frame_counter_++;
return CurrentEncodeFlags();
}
@@ -196,16 +196,16 @@ class RealTimeTemporalLayers : public TemporalLayers {
return encode_flags_[index];
}
- virtual int CurrentLayerId() const {
+ int CurrentLayerId() const override {
assert(layer_ids_length_ > 0 && layer_ids_ != NULL);
int index = frame_counter_ % layer_ids_length_;
assert(index >= 0 && index < layer_ids_length_);
return layer_ids_[index];
}
- virtual void PopulateCodecSpecific(bool base_layer_sync,
- CodecSpecificInfoVP8* vp8_info,
- uint32_t timestamp) {
+ void PopulateCodecSpecific(bool base_layer_sync,
+ CodecSpecificInfoVP8* vp8_info,
+ uint32_t timestamp) override {
assert(temporal_layers_ > 0);
if (temporal_layers_ == 1) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 55a4402cbe4..be55133dd6b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -301,14 +301,21 @@ int SimulcastEncoderAdapter::Encode(
// Aligning stride values based on width.
dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
(dst_width + 1) / 2, (dst_width + 1) / 2);
- libyuv::I420Scale(
- input_image.buffer(kYPlane), input_image.stride(kYPlane),
- input_image.buffer(kUPlane), input_image.stride(kUPlane),
- input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
- src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
- dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
- dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
- dst_height, libyuv::kFilterBilinear);
+ libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
+ input_image.video_frame_buffer()->StrideY(),
+ input_image.video_frame_buffer()->DataU(),
+ input_image.video_frame_buffer()->StrideU(),
+ input_image.video_frame_buffer()->DataV(),
+ input_image.video_frame_buffer()->StrideV(),
+ src_width, src_height,
+ dst_frame.video_frame_buffer()->MutableDataY(),
+ dst_frame.video_frame_buffer()->StrideY(),
+ dst_frame.video_frame_buffer()->MutableDataU(),
+ dst_frame.video_frame_buffer()->StrideU(),
+ dst_frame.video_frame_buffer()->MutableDataV(),
+ dst_frame.video_frame_buffer()->StrideV(),
+ dst_width, dst_height,
+ libyuv::kFilterBilinear);
dst_frame.set_timestamp(input_image.timestamp());
dst_frame.set_render_time_ms(input_image.render_time_ms());
streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,
@@ -494,10 +501,6 @@ void SimulcastEncoderAdapter::OnDroppedFrame() {
streaminfos_[0].encoder->OnDroppedFrame();
}
-int SimulcastEncoderAdapter::GetTargetFramerate() {
- return streaminfos_[0].encoder->GetTargetFramerate();
-}
-
bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
// We should not be calling this method before streaminfos_ are configured.
RTC_DCHECK(!streaminfos_.empty());
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
index 777ac1ba368..fca16df6fad 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
@@ -58,7 +58,6 @@ class SimulcastEncoderAdapter : public VP8Encoder {
void OnDroppedFrame() override;
- int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
const char* ImplementationName() const override;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index 9a7e1b2e7ca..aafcd797ac5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -107,34 +107,40 @@ TEST_F(TestSimulcastEncoderAdapter, DISABLED_TestRPSIEncoder) {
class MockVideoEncoder : public VideoEncoder {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
int32_t InitEncode(const VideoCodec* codecSettings,
int32_t numberOfCores,
- size_t maxPayloadSize) override {
+ size_t maxPayloadSize) /* override */ {
codec_ = *codecSettings;
return 0;
}
int32_t Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>* frame_types) override {
+ const std::vector<FrameType>* frame_types) /* override */ {
return 0;
}
int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override {
+ EncodedImageCallback* callback) /* override */ {
callback_ = callback;
return 0;
}
- int32_t Release() override { return 0; }
+ int32_t Release() /* override */ { return 0; }
- int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
+ int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) /* override */ {
return 0;
}
MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
- bool SupportsNativeHandle() const override { return supports_native_handle_; }
+ bool SupportsNativeHandle() const /* override */ {
+ return supports_native_handle_;
+ }
virtual ~MockVideoEncoder() {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index 2b2aa5de69f..b277ad2ee48 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -119,13 +119,13 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
int32_t Decoded(VideoFrame& decoded_image) override {
for (int i = 0; i < decoded_image.width(); ++i) {
- EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
+ EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
}
// TODO(mikhal): Verify the difference between U,V and the original.
for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
- EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
- EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4);
+ EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
+ EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
}
decoded_frames_++;
return 0;
@@ -168,7 +168,7 @@ class SkipEncodingUnusedStreamsTest {
virtual ~SpyingTemporalLayers() { delete layers_; }
- virtual int EncodeFlags(uint32_t timestamp) {
+ int EncodeFlags(uint32_t timestamp) override {
return layers_->EncodeFlags(timestamp);
}
@@ -222,26 +222,40 @@ class TestVp8Simulcast : public ::testing::Test {
TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
: encoder_(encoder), decoder_(decoder) {}
- // Creates an VideoFrame from |plane_colors|.
- static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
- for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
- int width =
- (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width());
- int height =
- (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
- PlaneType plane_type = static_cast<PlaneType>(plane_num);
- uint8_t* data = frame->buffer(plane_type);
+ static void SetPlane(uint8_t* data,
+ uint8_t value,
+ int width,
+ int height,
+ int stride) {
+ for (int i = 0; i < height; i++, data += stride) {
// Setting allocated area to zero - setting only image size to
// requested values - will make it easier to distinguish between image
// size and frame size (accounting for stride).
- memset(frame->buffer(plane_type), 0, frame->allocated_size(plane_type));
- for (int i = 0; i < height; i++) {
- memset(data, plane_colors[plane_num], width);
- data += frame->stride(plane_type);
- }
+ memset(data, value, width);
+ memset(data + width, 0, stride - width);
}
}
+ // Fills in an VideoFrameBuffer from |plane_colors|.
+ static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ int plane_colors[kNumOfPlanes]) {
+ int width = buffer->width();
+ int height = buffer->height();
+ int chroma_width = (width + 1) / 2;
+ int chroma_height = (height + 1) / 2;
+
+ SetPlane(buffer->MutableDataY(), plane_colors[0],
+ width, height, buffer->StrideY());
+
+ SetPlane(buffer->MutableDataU(), plane_colors[1],
+ chroma_width, chroma_height,
+ buffer->StrideU());
+
+ SetPlane(buffer->MutableDataV(), plane_colors[2],
+ chroma_width, chroma_height,
+ buffer->StrideV());
+ }
+
static void DefaultSettings(VideoCodec* settings,
const int* temporal_layer_profile) {
assert(settings);
@@ -305,11 +319,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (kDefaultWidth + 1) / 2;
input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
}
@@ -555,11 +569,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
// The for loop above did not set the bitrate of the highest layer.
@@ -596,11 +610,11 @@ class TestVp8Simulcast : public ::testing::Test {
half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
}
@@ -691,7 +705,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
int picture_id = -1;
@@ -707,7 +721,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -715,7 +729,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -724,7 +738,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -739,7 +753,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
@@ -898,7 +912,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -906,7 +920,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index f3ebfa1f766..d562dd4bb97 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -14,9 +14,9 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -159,8 +159,8 @@ class TestVp8Impl : public ::testing::Test {
}
size_t WaitForEncodedFrame() const {
- int64_t startTime = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs) {
+ int64_t startTime = rtc::TimeMillis();
+ while (rtc::TimeMillis() - startTime < kMaxWaitEncTimeMs) {
if (encode_complete_callback_->EncodeComplete()) {
return encoded_frame_._length;
}
@@ -169,8 +169,8 @@ class TestVp8Impl : public ::testing::Test {
}
size_t WaitForDecodedFrame() const {
- int64_t startTime = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs) {
+ int64_t startTime = rtc::TimeMillis();
+ while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
if (decode_complete_callback_->DecodeComplete()) {
return CalcBufferSize(kI420, decoded_frame_.width(),
decoded_frame_.height());
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index b34288632cd..f035568355d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -20,6 +20,7 @@
#include "libyuv/convert.h" // NOLINT
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -29,7 +30,6 @@
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -166,7 +166,7 @@ VP8EncoderImpl::VP8EncoderImpl()
tl1_frame_dropper_(kTl1MaxTimeToDropFrames),
key_frame_request_(kMaxSimulcastStreams, false),
quality_scaler_enabled_(false) {
- uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+ uint32_t seed = rtc::Time32();
srand(seed);
picture_id_.reserve(kMaxSimulcastStreams);
@@ -598,14 +598,9 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
rps_.Init();
- // Disable both high-QP limits and framedropping. Both are handled by libvpx
- // internally.
- const int kDisabledBadQpThreshold = 64;
- // TODO(glaznev/sprang): consider passing codec initial bitrate to quality
- // scaler to avoid starting with HD for low initial bitrates.
- quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, false, 0, 0, 0,
- codec_.maxFramerate);
+ quality_scaler_.Init(QualityScaler::kLowVp8QpThreshold,
+ QualityScaler::kBadVp8QpThreshold, codec_.startBitrate,
+ codec_.width, codec_.height, codec_.maxFramerate);
// Only apply scaling to improve for single-layer streams. The scaling metrics
// use frame drops as a signal and is only applicable when we drop frames.
@@ -751,15 +746,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// Image in vpx_image_t format.
// Input image is const. VP8's raw image is not defined as const.
raw_images_[0].planes[VPX_PLANE_Y] =
- const_cast<uint8_t*>(input_image.buffer(kYPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
raw_images_[0].planes[VPX_PLANE_U] =
- const_cast<uint8_t*>(input_image.buffer(kUPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
raw_images_[0].planes[VPX_PLANE_V] =
- const_cast<uint8_t*>(input_image.buffer(kVPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
- raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
- raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane);
- raw_images_[0].stride[VPX_PLANE_V] = input_image.stride(kVPlane);
+ raw_images_[0].stride[VPX_PLANE_Y] =
+ input_image.video_frame_buffer()->StrideY();
+ raw_images_[0].stride[VPX_PLANE_U] =
+ input_image.video_frame_buffer()->StrideU();
+ raw_images_[0].stride[VPX_PLANE_V] =
+ input_image.video_frame_buffer()->StrideV();
for (size_t i = 1; i < encoders_.size(); ++i) {
// Scale the image down a number of times by downsampling factor
@@ -1020,6 +1018,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
encoded_images_[encoder_idx].capture_time_ms_ =
input_image.render_time_ms();
+ encoded_images_[encoder_idx].rotation_ = input_image.rotation();
int qp = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
@@ -1053,9 +1052,9 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
}
if (encoders_.size() == 1 && send_stream_[0]) {
if (encoded_images_[0]._length > 0) {
- int qp;
- vpx_codec_control(&encoders_[0], VP8E_GET_LAST_QUANTIZER_64, &qp);
- quality_scaler_.ReportQP(qp);
+ int qp_128;
+ vpx_codec_control(&encoders_[0], VP8E_GET_LAST_QUANTIZER, &qp_128);
+ quality_scaler_.ReportQP(qp_128);
} else {
quality_scaler_.ReportDroppedFrame();
}
@@ -1355,9 +1354,12 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
- decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
- decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
- decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
+ decoded_image.video_frame_buffer()->MutableDataY(),
+ decoded_image.video_frame_buffer()->StrideY(),
+ decoded_image.video_frame_buffer()->MutableDataU(),
+ decoded_image.video_frame_buffer()->StrideU(),
+ decoded_image.video_frame_buffer()->MutableDataV(),
+ decoded_image.video_frame_buffer()->StrideV(),
img->d_w, img->d_h);
decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index 6906a322afc..f8af6422538 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -40,21 +40,21 @@ class VP8EncoderImpl : public VP8Encoder {
virtual ~VP8EncoderImpl();
- virtual int Release();
+ int Release() override;
- virtual int InitEncode(const VideoCodec* codec_settings,
- int number_of_cores,
- size_t max_payload_size);
+ int InitEncode(const VideoCodec* codec_settings,
+ int number_of_cores,
+ size_t max_payload_size) override;
- virtual int Encode(const VideoFrame& input_image,
- const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types);
+ int Encode(const VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<FrameType>* frame_types) override;
- virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
+ int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
- virtual int SetChannelParameters(uint32_t packet_loss, int64_t rtt);
+ int SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
- virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);
+ int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate) override;
void OnDroppedFrame() override {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 33dae8d8e4c..28027009537 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -12,10 +12,10 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/include/video_image.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/tools/simple_command_line_parser.h"
@@ -158,7 +158,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
decoder->RegisterDecodeCompleteCallback(&decoder_callback);
// Read->Encode->Decode sequence.
// num_frames = -1 implies unlimited encoding (entire sequence).
- int64_t starttime = webrtc::TickTime::MillisecondTimestamp();
+ int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1;
int frames_processed = 0;
input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
@@ -176,7 +176,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
++frame_cnt;
}
printf("\nProcessed %d frames\n", frames_processed);
- int64_t endtime = webrtc::TickTime::MillisecondTimestamp();
+ int64_t endtime = rtc::TimeMillis();
int64_t totalExecutionTime = endtime - starttime;
printf("Total execution time: %.2lf ms\n",
static_cast<double>(totalExecutionTime));
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
index 3bcbe46b3a8..3b726a0cc5d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
@@ -18,6 +18,7 @@ namespace webrtc {
class VP9Encoder : public VideoEncoder {
public:
+ static bool IsSupported();
static VP9Encoder* Create();
virtual ~VP9Encoder() {}
@@ -25,6 +26,7 @@ class VP9Encoder : public VideoEncoder {
class VP9Decoder : public VideoDecoder {
public:
+ static bool IsSupported();
static VP9Decoder* Create();
virtual ~VP9Decoder() {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
index cd5201f8aac..9124e5fad30 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
@@ -20,6 +20,21 @@
'<(libvpx_dir)/libvpx.gyp:libvpx',
],
}],
+ ['libvpx_build_vp9==1', {
+ 'sources': [
+ 'screenshare_layers.cc',
+ 'screenshare_layers.h',
+ 'vp9_frame_buffer_pool.cc',
+ 'vp9_frame_buffer_pool.h',
+ 'vp9_impl.cc',
+ 'vp9_impl.h',
+ ],
+ }, {
+ 'sources': [
+ 'vp9_noop.cc',
+ ],
+ }
+ ],
],
'dependencies': [
'<(webrtc_root)/common_video/common_video.gyp:common_video',
@@ -28,12 +43,6 @@
],
'sources': [
'include/vp9.h',
- 'screenshare_layers.cc',
- 'screenshare_layers.h',
- 'vp9_frame_buffer_pool.cc',
- 'vp9_frame_buffer_pool.h',
- 'vp9_impl.cc',
- 'vp9_impl.h',
],
},
],
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index fcefdb89fa2..750f7427cdd 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -22,13 +22,13 @@
#include "vpx/vp8dx.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -47,6 +47,10 @@ int GetCpuSpeed(int width, int height) {
#endif
}
+bool VP9Encoder::IsSupported() {
+ return true;
+}
+
VP9Encoder* VP9Encoder::Create() {
return new VP9EncoderImpl();
}
@@ -77,7 +81,7 @@ VP9EncoderImpl::VP9EncoderImpl()
// Use two spatial when screensharing with flexible mode.
spatial_layer_(new ScreenshareLayersVP9(2)) {
memset(&codec_, 0, sizeof(codec_));
- uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+ uint32_t seed = rtc::Time32();
srand(seed);
}
@@ -500,12 +504,15 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
- raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
- raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
- raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
- raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
- raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
- raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
+ raw_->planes[VPX_PLANE_Y] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
+ raw_->planes[VPX_PLANE_U] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
+ raw_->planes[VPX_PLANE_V] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
+ raw_->stride[VPX_PLANE_Y] = input_image.video_frame_buffer()->StrideY();
+ raw_->stride[VPX_PLANE_U] = input_image.video_frame_buffer()->StrideU();
+ raw_->stride[VPX_PLANE_V] = input_image.video_frame_buffer()->StrideV();
vpx_enc_frame_flags_t flags = 0;
bool send_keyframe = (frame_type == kVideoFrameKey);
@@ -692,8 +699,12 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
encoded_image_._timeStamp = input_image_->timestamp();
encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
+ encoded_image_.rotation_ = input_image_->rotation();
encoded_image_._encodedHeight = raw_->d_h;
encoded_image_._encodedWidth = raw_->d_w;
+ int qp = -1;
+ vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
+ encoded_image_.qp_ = qp;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
&frag_info);
}
@@ -816,6 +827,10 @@ const char* VP9EncoderImpl::ImplementationName() const {
return "libvpx";
}
+bool VP9Decoder::IsSupported() {
+ return true;
+}
+
VP9Decoder* VP9Decoder::Create() {
return new VP9DecoderImpl();
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc
new file mode 100644
index 00000000000..cc37e1adbbd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#if !defined(RTC_DISABLE_VP9)
+#error
+#endif // !defined(RTC_DISABLE_VP9)
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
+
+namespace webrtc {
+
+bool VP9Encoder::IsSupported() {
+ return false;
+}
+
+VP9Encoder* VP9Encoder::Create() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+bool VP9Decoder::IsSupported() {
+ return false;
+}
+
+VP9Decoder* VP9Decoder::Create() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc b/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc
deleted file mode 100644
index b2586fce3fb..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/content_metrics_processing.h"
-
-#include <math.h>
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-
-namespace webrtc {
-//////////////////////////////////
-/// VCMContentMetricsProcessing //
-//////////////////////////////////
-
-VCMContentMetricsProcessing::VCMContentMetricsProcessing()
- : recursive_avg_factor_(1 / 150.0f), // matched to 30fps.
- frame_cnt_uniform_avg_(0),
- avg_motion_level_(0.0f),
- avg_spatial_level_(0.0f) {
- recursive_avg_ = new VideoContentMetrics();
- uniform_avg_ = new VideoContentMetrics();
-}
-
-VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
- delete recursive_avg_;
- delete uniform_avg_;
-}
-
-int VCMContentMetricsProcessing::Reset() {
- recursive_avg_->Reset();
- uniform_avg_->Reset();
- frame_cnt_uniform_avg_ = 0;
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
- if (frameRate == 0)
- frameRate = 1;
- // Update factor for recursive averaging.
- recursive_avg_factor_ = static_cast<float>(1000.0f) /
- static_cast<float>(frameRate * kQmMinIntervalMs);
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
- return recursive_avg_;
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
- if (frame_cnt_uniform_avg_ == 0) {
- return NULL;
- }
- // Two metrics are used: motion and spatial level.
- uniform_avg_->motion_magnitude =
- avg_motion_level_ / static_cast<float>(frame_cnt_uniform_avg_);
- uniform_avg_->spatial_pred_err =
- avg_spatial_level_ / static_cast<float>(frame_cnt_uniform_avg_);
- return uniform_avg_;
-}
-
-void VCMContentMetricsProcessing::ResetShortTermAvgData() {
- // Reset.
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- frame_cnt_uniform_avg_ = 0;
-}
-
-int VCMContentMetricsProcessing::UpdateContentData(
- const VideoContentMetrics* contentMetrics) {
- if (contentMetrics == NULL) {
- return VCM_OK;
- }
- return ProcessContent(contentMetrics);
-}
-
-int VCMContentMetricsProcessing::ProcessContent(
- const VideoContentMetrics* contentMetrics) {
- // Update the recursive averaged metrics: average is over longer window
- // of time: over QmMinIntervalMs ms.
- UpdateRecursiveAvg(contentMetrics);
- // Update the uniform averaged metrics: average is over shorter window
- // of time: based on ~RTCP reports.
- UpdateUniformAvg(contentMetrics);
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateUniformAvg(
- const VideoContentMetrics* contentMetrics) {
- // Update frame counter.
- frame_cnt_uniform_avg_ += 1;
- // Update averaged metrics: motion and spatial level are used.
- avg_motion_level_ += contentMetrics->motion_magnitude;
- avg_spatial_level_ += contentMetrics->spatial_pred_err;
- return;
-}
-
-void VCMContentMetricsProcessing::UpdateRecursiveAvg(
- const VideoContentMetrics* contentMetrics) {
- // Spatial metrics: 2x2, 1x2(H), 2x1(V).
- recursive_avg_->spatial_pred_err =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err;
-
- recursive_avg_->spatial_pred_err_h =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_h +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
-
- recursive_avg_->spatial_pred_err_v =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_v +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
-
- // Motion metric: Derived from NFD (normalized frame difference).
- recursive_avg_->motion_magnitude =
- (1 - recursive_avg_factor_) * recursive_avg_->motion_magnitude +
- recursive_avg_factor_ * contentMetrics->motion_magnitude;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h b/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h
deleted file mode 100644
index 3f67ec19c98..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct VideoContentMetrics;
-
-// QM interval time (in ms)
-enum { kQmMinIntervalMs = 10000 };
-
-// Flag for NFD metric vs motion metric
-enum { kNfdMetric = 1 };
-
-/**********************************/
-/* Content Metrics Processing */
-/**********************************/
-class VCMContentMetricsProcessing {
- public:
- VCMContentMetricsProcessing();
- ~VCMContentMetricsProcessing();
-
- // Update class with latest metrics.
- int UpdateContentData(const VideoContentMetrics* contentMetrics);
-
- // Reset the short-term averaged content data.
- void ResetShortTermAvgData();
-
- // Initialize.
- int Reset();
-
- // Inform class of current frame rate.
- void UpdateFrameRate(uint32_t frameRate);
-
- // Returns the long-term averaged content data: recursive average over longer
- // time scale.
- VideoContentMetrics* LongTermAvgData();
-
- // Returns the short-term averaged content data: uniform average over
- // shorter time scalE.
- VideoContentMetrics* ShortTermAvgData();
-
- private:
- // Compute working average.
- int ProcessContent(const VideoContentMetrics* contentMetrics);
-
- // Update the recursive averaged metrics: longer time average (~5/10 secs).
- void UpdateRecursiveAvg(const VideoContentMetrics* contentMetrics);
-
- // Update the uniform averaged metrics: shorter time average (~RTCP report).
- void UpdateUniformAvg(const VideoContentMetrics* contentMetrics);
-
- VideoContentMetrics* recursive_avg_;
- VideoContentMetrics* uniform_avg_;
- float recursive_avg_factor_;
- uint32_t frame_cnt_uniform_avg_;
- float avg_motion_level_;
- float avg_spatial_level_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
new file mode 100644
index 00000000000..c6a1a06e756
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/frame_buffer2.h"
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace video_coding {
+
+namespace {
+// The maximum age of decoded frames tracked by frame buffer, compared to
+// |newest_picture_id_|.
+constexpr int kMaxFrameAge = 4096;
+
+// The maximum number of decoded frames being tracked by the frame buffer.
+constexpr int kMaxNumHistoryFrames = 256;
+} // namespace
+
+bool FrameBuffer::FrameComp::operator()(const FrameKey& f1,
+ const FrameKey& f2) const {
+ // first = picture id
+ // second = spatial layer
+ if (f1.first == f2.first)
+ return f1.second < f2.second;
+ return AheadOf(f2.first, f1.first);
+}
+
+FrameBuffer::FrameBuffer(Clock* clock,
+ VCMJitterEstimator* jitter_estimator,
+ const VCMTiming* timing)
+ : clock_(clock),
+ frame_inserted_event_(false, false),
+ jitter_estimator_(jitter_estimator),
+ timing_(timing),
+ newest_picture_id_(-1) {}
+
+std::unique_ptr<FrameObject> FrameBuffer::NextFrame(int64_t max_wait_time_ms) {
+ int64_t latest_return_time = clock_->TimeInMilliseconds() + max_wait_time_ms;
+ while (true) {
+ int64_t now = clock_->TimeInMilliseconds();
+ int64_t wait_ms = max_wait_time_ms;
+
+ crit_.Enter();
+ frame_inserted_event_.Reset();
+ auto next_frame = frames_.end();
+ for (auto frame_it = frames_.begin(); frame_it != frames_.end();
+ ++frame_it) {
+ const FrameObject& frame = *frame_it->second;
+ if (IsContinuous(frame)) {
+ next_frame = frame_it;
+ int64_t render_time = timing_->RenderTimeMs(frame.timestamp, now);
+ wait_ms = timing_->MaxWaitingTime(render_time, now);
+
+ // This will cause the frame buffer to prefer high framerate rather
+ // than high resolution in the case of the decoder not decoding fast
+ // enough and the stream has multiple spatial and temporal layers.
+ if (wait_ms == 0)
+ continue;
+
+ break;
+ }
+ }
+ crit_.Leave();
+
+ // If the timout occures, return. Otherwise a new frame has been inserted
+ // and the best frame to decode next will be selected again.
+ wait_ms = std::min<int64_t>(wait_ms, latest_return_time - now);
+ wait_ms = std::max<int64_t>(wait_ms, 0);
+ if (!frame_inserted_event_.Wait(wait_ms)) {
+ crit_.Enter();
+ if (next_frame != frames_.end()) {
+ // TODO(philipel): update jitter estimator with correct values.
+ jitter_estimator_->UpdateEstimate(100, 100);
+
+ decoded_frames_.insert(next_frame->first);
+ std::unique_ptr<FrameObject> frame = std::move(next_frame->second);
+ frames_.erase(frames_.begin(), ++next_frame);
+ crit_.Leave();
+ return frame;
+ } else {
+ crit_.Leave();
+ return std::unique_ptr<FrameObject>();
+ }
+ }
+ }
+}
+
+void FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
+ rtc::CritScope lock(&crit_);
+ if (newest_picture_id_ == -1)
+ newest_picture_id_ = frame->picture_id;
+
+ if (AheadOf<uint16_t>(frame->picture_id, newest_picture_id_))
+ newest_picture_id_ = frame->picture_id;
+
+ // Remove frames as long as we have too many, |kMaxNumHistoryFrames|.
+ while (decoded_frames_.size() > kMaxNumHistoryFrames)
+ decoded_frames_.erase(decoded_frames_.begin());
+
+ // Remove frames that are too old, |kMaxNumHistoryFrames|.
+ uint16_t old_picture_id = Subtract<1 << 16>(newest_picture_id_, kMaxFrameAge);
+ auto old_decoded_it =
+ decoded_frames_.lower_bound(FrameKey(old_picture_id, 0));
+ decoded_frames_.erase(decoded_frames_.begin(), old_decoded_it);
+
+ FrameKey key(frame->picture_id, frame->spatial_layer);
+ frames_[key] = std::move(frame);
+ frame_inserted_event_.Set();
+}
+
+bool FrameBuffer::IsContinuous(const FrameObject& frame) const {
+ // If a frame with an earlier picture id was inserted compared to the last
+ // decoded frames picture id then that frame arrived too late.
+ if (!decoded_frames_.empty() &&
+ AheadOf(decoded_frames_.rbegin()->first, frame.picture_id)) {
+ return false;
+ }
+
+ // Have we decoded all frames that this frame depend on?
+ for (size_t r = 0; r < frame.num_references; ++r) {
+ FrameKey ref_key(frame.references[r], frame.spatial_layer);
+ if (decoded_frames_.find(ref_key) == decoded_frames_.end())
+ return false;
+ }
+
+ // If this is a layer frame, have we decoded the lower layer of this
+ // super frame.
+ if (frame.inter_layer_predicted) {
+ RTC_DCHECK_GT(frame.spatial_layer, 0);
+ FrameKey ref_key(frame.picture_id, frame.spatial_layer - 1);
+ if (decoded_frames_.find(ref_key) == decoded_frames_.end())
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
new file mode 100644
index 00000000000..10cae426f62
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
+
+#include <array>
+#include <map>
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/thread_annotations.h"
+
+namespace webrtc {
+
+class Clock;
+class VCMJitterEstimator;
+class VCMTiming;
+
+namespace video_coding {
+
+class FrameObject;
+
+class FrameBuffer {
+ public:
+ FrameBuffer(Clock* clock,
+ VCMJitterEstimator* jitter_estimator,
+ const VCMTiming* timing);
+
+ // Insert a frame into the frame buffer.
+ void InsertFrame(std::unique_ptr<FrameObject> frame);
+
+ // Get the next frame for decoding. Will return at latest after
+ // |max_wait_time_ms|, with either a managed FrameObject or an empty
+ // unique ptr if there is no available frame for decoding.
+ std::unique_ptr<FrameObject> NextFrame(int64_t max_wait_time_ms);
+
+ private:
+ // FrameKey is a pair of (picture id, spatial layer).
+ using FrameKey = std::pair<uint16_t, uint8_t>;
+
+ // Comparator used to sort frames, first on their picture id, and second
+ // on their spatial layer.
+ struct FrameComp {
+ bool operator()(const FrameKey& f1, const FrameKey& f2) const;
+ };
+
+ // Determines whether a frame is continuous.
+ bool IsContinuous(const FrameObject& frame) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Keep track of decoded frames.
+ std::set<FrameKey, FrameComp> decoded_frames_ GUARDED_BY(crit_);
+
+ // The actual buffer that holds the FrameObjects.
+ std::map<FrameKey, std::unique_ptr<FrameObject>, FrameComp> frames_
+ GUARDED_BY(crit_);
+
+ rtc::CriticalSection crit_;
+ Clock* const clock_;
+ rtc::Event frame_inserted_event_;
+ VCMJitterEstimator* const jitter_estimator_;
+ const VCMTiming* const timing_;
+ int newest_picture_id_ GUARDED_BY(crit_);
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer);
+};
+
+} // namespace video_coding
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
new file mode 100644
index 00000000000..67706ce0581
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -0,0 +1,329 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/frame_buffer2.h"
+
+#include <algorithm>
+#include <cstring>
+#include <limits>
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/random.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace video_coding {
+
+class VCMTimingFake : public VCMTiming {
+ public:
+ explicit VCMTimingFake(Clock* clock) : VCMTiming(clock) {}
+
+ int64_t RenderTimeMs(uint32_t frame_timestamp,
+ int64_t now_ms) const override {
+ if (last_ms_ == -1) {
+ last_ms_ = now_ms + kDelayMs;
+ last_timestamp_ = frame_timestamp;
+ }
+
+ uint32_t diff = MinDiff(frame_timestamp, last_timestamp_);
+ if (AheadOf(frame_timestamp, last_timestamp_))
+ last_ms_ += diff / 90;
+ else
+ last_ms_ -= diff / 90;
+
+ last_timestamp_ = frame_timestamp;
+ return last_ms_;
+ }
+
+ uint32_t MaxWaitingTime(int64_t render_time_ms,
+ int64_t now_ms) const override {
+ return std::max<int>(0, render_time_ms - now_ms - kDecodeTime);
+ }
+
+ private:
+ static constexpr int kDelayMs = 50;
+ static constexpr int kDecodeTime = kDelayMs / 2;
+ mutable uint32_t last_timestamp_ = 0;
+ mutable int64_t last_ms_ = -1;
+};
+
+class VCMJitterEstimatorMock : public VCMJitterEstimator {
+ public:
+ explicit VCMJitterEstimatorMock(Clock* clock) : VCMJitterEstimator(clock) {}
+
+ MOCK_METHOD1(UpdateRtt, void(int64_t rttMs));
+ MOCK_METHOD3(UpdateEstimate,
+ void(int64_t frameDelayMs,
+ uint32_t frameSizeBytes,
+ bool incompleteFrame));
+};
+
+class FrameObjectMock : public FrameObject {
+ public:
+ MOCK_CONST_METHOD1(GetBitstream, bool(uint8_t* destination));
+};
+
+class TestFrameBuffer2 : public ::testing::Test {
+ protected:
+ static constexpr int kMaxReferences = 5;
+ static constexpr int kFps1 = 1000;
+ static constexpr int kFps10 = kFps1 / 10;
+ static constexpr int kFps20 = kFps1 / 20;
+
+ TestFrameBuffer2()
+ : clock_(0),
+ timing_(&clock_),
+ jitter_estimator_(&clock_),
+ buffer_(&clock_, &jitter_estimator_, &timing_),
+ rand_(0x34678213),
+ tear_down_(false),
+ extract_thread_(&ExtractLoop, this, "Extract Thread"),
+ trigger_extract_event_(false, false),
+ crit_acquired_event_(false, false) {}
+
+ void SetUp() override { extract_thread_.Start(); }
+
+ void TearDown() override {
+ tear_down_ = true;
+ trigger_extract_event_.Set();
+ extract_thread_.Stop();
+ }
+
+ template <typename... T>
+ void InsertFrame(uint16_t picture_id,
+ uint8_t spatial_layer,
+ int64_t ts_ms,
+ bool inter_layer_predicted,
+ T... refs) {
+ static_assert(sizeof...(refs) <= kMaxReferences,
+ "To many references specified for FrameObject.");
+ std::array<uint16_t, sizeof...(refs)> references = {{refs...}};
+
+ std::unique_ptr<FrameObjectMock> frame(new FrameObjectMock());
+ frame->picture_id = picture_id;
+ frame->spatial_layer = spatial_layer;
+ frame->timestamp = ts_ms * 90;
+ frame->num_references = references.size();
+ frame->inter_layer_predicted = inter_layer_predicted;
+ for (size_t r = 0; r < references.size(); ++r)
+ frame->references[r] = references[r];
+
+ buffer_.InsertFrame(std::move(frame));
+ }
+
+ void ExtractFrame(int64_t max_wait_time = 0) {
+ crit_.Enter();
+ if (max_wait_time == 0) {
+ frames_.emplace_back(buffer_.NextFrame(0));
+ crit_.Leave();
+ } else {
+ max_wait_time_ = max_wait_time;
+ trigger_extract_event_.Set();
+ crit_.Leave();
+ // Make sure |crit_| is aquired by |extract_thread_| before returning.
+ crit_acquired_event_.Wait(rtc::Event::kForever);
+ }
+ }
+
+ void CheckFrame(size_t index, int picture_id, int spatial_layer) {
+ rtc::CritScope lock(&crit_);
+ ASSERT_LT(index, frames_.size());
+ ASSERT_TRUE(frames_[index]);
+ ASSERT_EQ(picture_id, frames_[index]->picture_id);
+ ASSERT_EQ(spatial_layer, frames_[index]->spatial_layer);
+ }
+
+ void CheckNoFrame(size_t index) {
+ rtc::CritScope lock(&crit_);
+ ASSERT_LT(index, frames_.size());
+ ASSERT_FALSE(frames_[index]);
+ }
+
+ static bool ExtractLoop(void* obj) {
+ TestFrameBuffer2* tfb = static_cast<TestFrameBuffer2*>(obj);
+ while (true) {
+ tfb->trigger_extract_event_.Wait(rtc::Event::kForever);
+ {
+ rtc::CritScope lock(&tfb->crit_);
+ tfb->crit_acquired_event_.Set();
+ if (tfb->tear_down_)
+ return false;
+
+ tfb->frames_.emplace_back(tfb->buffer_.NextFrame(tfb->max_wait_time_));
+ }
+ }
+ }
+
+ uint32_t Rand() { return rand_.Rand<uint32_t>(); }
+
+ SimulatedClock clock_;
+ VCMTimingFake timing_;
+ VCMJitterEstimatorMock jitter_estimator_;
+ FrameBuffer buffer_;
+ std::vector<std::unique_ptr<FrameObject>> frames_;
+ Random rand_;
+
+ int64_t max_wait_time_;
+ bool tear_down_;
+ rtc::PlatformThread extract_thread_;
+ rtc::Event trigger_extract_event_;
+ rtc::Event crit_acquired_event_;
+ rtc::CriticalSection crit_;
+};
+
+TEST_F(TestFrameBuffer2, ExtractFromEmptyBuffer) {
+ ExtractFrame();
+ CheckNoFrame(0);
+}
+
+TEST_F(TestFrameBuffer2, WaitForFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ ExtractFrame(20);
+ InsertFrame(pid, 0, ts, false);
+ CheckFrame(0, pid, 0);
+}
+
+TEST_F(TestFrameBuffer2, OneSuperFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ ExtractFrame(20);
+ InsertFrame(pid, 1, ts, true);
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid, 1);
+}
+
+TEST_F(TestFrameBuffer2, OneLayerStream) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ CheckFrame(0, pid, 0);
+ for (int i = 1; i < 10; ++i) {
+ InsertFrame(pid + i, 0, ts + i * kFps10, false, pid + i - 1);
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ CheckFrame(i, pid + i, 0);
+ }
+}
+
+TEST_F(TestFrameBuffer2, OneLayerStreamReordered) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ CheckFrame(0, pid, 0);
+ for (int i = 1; i < 10; i += 2) {
+ ExtractFrame(15);
+ InsertFrame(pid + i + 1, 0, ts + (i + 1) * kFps10, false, pid + i);
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ InsertFrame(pid + i, 0, ts + i * kFps10, false, pid + i - 1);
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ ExtractFrame();
+ CheckFrame(i, pid + i, 0);
+ CheckFrame(i + 1, pid + i + 1, 0);
+ }
+}
+
+TEST_F(TestFrameBuffer2, DropTemporalLayerSlowDecoder) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ InsertFrame(pid + 1, 0, ts + kFps20, false);
+ for (int i = 2; i < 10; i += 2) {
+ uint32_t ts_tl0 = ts + i / 2 * kFps10;
+ InsertFrame(pid + i, 0, ts_tl0, false, pid + i - 2);
+ InsertFrame(pid + i + 1, 0, ts_tl0 + kFps20, false, pid + i, pid + i - 1);
+ }
+
+ for (int i = 0; i < 10; ++i) {
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(60);
+ }
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid + 1, 0);
+ CheckFrame(2, pid + 2, 0);
+ CheckFrame(3, pid + 4, 0);
+ CheckFrame(4, pid + 6, 0);
+ CheckFrame(5, pid + 8, 0);
+ CheckNoFrame(6);
+ CheckNoFrame(7);
+ CheckNoFrame(8);
+ CheckNoFrame(9);
+}
+
+TEST_F(TestFrameBuffer2, DropSpatialLayerSlowDecoder) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ InsertFrame(pid, 1, ts, false);
+ for (int i = 1; i < 6; ++i) {
+ uint32_t ts_tl0 = ts + i * kFps10;
+ InsertFrame(pid + i, 0, ts_tl0, false, pid + i - 1);
+ InsertFrame(pid + i, 1, ts_tl0, false, pid + i - 1);
+ }
+
+ ExtractFrame();
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(55);
+ for (int i = 2; i < 12; ++i) {
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(55);
+ }
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid, 1);
+ CheckFrame(2, pid + 1, 0);
+ CheckFrame(3, pid + 1, 1);
+ CheckFrame(4, pid + 2, 0);
+ CheckFrame(5, pid + 2, 1);
+ CheckFrame(6, pid + 3, 0);
+ CheckFrame(7, pid + 4, 0);
+ CheckFrame(8, pid + 5, 0);
+ CheckNoFrame(9);
+ CheckNoFrame(10);
+ CheckNoFrame(11);
+}
+
+TEST_F(TestFrameBuffer2, InsertLateFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ InsertFrame(pid + 2, 0, ts, false);
+ ExtractFrame();
+ InsertFrame(pid + 1, 0, ts, false, pid);
+ ExtractFrame();
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid + 2, 0);
+ CheckNoFrame(2);
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
index 363c8a70357..7b9ec0d2835 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
@@ -15,33 +15,56 @@
namespace webrtc {
namespace video_coding {
+FrameObject::FrameObject()
+ : picture_id(0),
+ spatial_layer(0),
+ timestamp(0),
+ num_references(0),
+ inter_layer_predicted(false) {}
+
RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
- uint16_t picture_id,
- uint16_t first_packet,
- uint16_t last_packet)
+ uint16_t first_seq_num,
+ uint16_t last_seq_num)
: packet_buffer_(packet_buffer),
- first_packet_(first_packet),
- last_packet_(last_packet) {}
+ first_seq_num_(first_seq_num),
+ last_seq_num_(last_seq_num) {
+ VCMPacket* packet = packet_buffer_->GetPacket(first_seq_num);
+ if (packet) {
+ frame_type_ = packet->frameType;
+ codec_type_ = packet->codec;
+ }
+}
RtpFrameObject::~RtpFrameObject() {
packet_buffer_->ReturnFrame(this);
}
-uint16_t RtpFrameObject::first_packet() const {
- return first_packet_;
+uint16_t RtpFrameObject::first_seq_num() const {
+ return first_seq_num_;
+}
+
+uint16_t RtpFrameObject::last_seq_num() const {
+ return last_seq_num_;
}
-uint16_t RtpFrameObject::last_packet() const {
- return last_packet_;
+FrameType RtpFrameObject::frame_type() const {
+ return frame_type_;
}
-uint16_t RtpFrameObject::picture_id() const {
- return picture_id_;
+VideoCodecType RtpFrameObject::codec_type() const {
+ return codec_type_;
}
bool RtpFrameObject::GetBitstream(uint8_t* destination) const {
return packet_buffer_->GetBitstream(*this, destination);
}
+RTPVideoTypeHeader* RtpFrameObject::GetCodecHeader() const {
+ VCMPacket* packet = packet_buffer_->GetPacket(first_seq_num_);
+ if (!packet)
+ return nullptr;
+ return &packet->codecSpecificHeader.codecHeader;
+}
+
} // namespace video_coding
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.h b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
index 2a68293d638..e8bb4811e6e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.h
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
@@ -11,16 +11,31 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_OBJECT_H_
#define WEBRTC_MODULES_VIDEO_CODING_FRAME_OBJECT_H_
-#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace video_coding {
class FrameObject {
public:
- virtual uint16_t picture_id() const = 0;
+ static const uint8_t kMaxFrameReferences = 5;
+
+ FrameObject();
+
virtual bool GetBitstream(uint8_t* destination) const = 0;
virtual ~FrameObject() {}
+
+ // The tuple (|picture_id|, |spatial_layer|) uniquely identifies a frame
+ // object. For codec types that don't necessarily have picture ids they
+ // have to be constructed from the header data relevant to that codec.
+ uint16_t picture_id;
+ uint8_t spatial_layer;
+ uint32_t timestamp;
+
+ size_t num_references;
+ uint16_t references[kMaxFrameReferences];
+ bool inter_layer_predicted;
};
class PacketBuffer;
@@ -28,20 +43,23 @@ class PacketBuffer;
class RtpFrameObject : public FrameObject {
public:
RtpFrameObject(PacketBuffer* packet_buffer,
- uint16_t picture_id,
- uint16_t first_packet,
- uint16_t last_packet);
+ uint16_t first_seq_num,
+ uint16_t last_seq_num);
+
~RtpFrameObject();
- uint16_t first_packet() const;
- uint16_t last_packet() const;
- uint16_t picture_id() const override;
+ uint16_t first_seq_num() const;
+ uint16_t last_seq_num() const;
+ FrameType frame_type() const;
+ VideoCodecType codec_type() const;
bool GetBitstream(uint8_t* destination) const override;
+ RTPVideoTypeHeader* GetCodecHeader() const;
private:
PacketBuffer* packet_buffer_;
- uint16_t picture_id_;
- uint16_t first_packet_;
- uint16_t last_packet_;
+ FrameType frame_type_;
+ VideoCodecType codec_type_;
+ uint16_t first_seq_num_;
+ uint16_t last_seq_num_;
};
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
index 2463cf5c7a5..abc6369a005 100644
--- a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
@@ -1,12 +1,12 @@
/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
#include "webrtc/modules/video_coding/generic_encoder.h"
@@ -21,89 +21,16 @@
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
-namespace {
-// Map information from info into rtp. If no relevant information is found
-// in info, rtp is set to NULL.
-void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
- RTC_DCHECK(info);
- switch (info->codecType) {
- case kVideoCodecVP8: {
- rtp->codec = kRtpVideoVp8;
- rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
- rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
- rtp->codecHeader.VP8.nonReference = info->codecSpecific.VP8.nonReference;
- rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
- rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
- rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
- rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
- rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
- return;
- }
- case kVideoCodecVP9: {
- rtp->codec = kRtpVideoVp9;
- rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
- rtp->codecHeader.VP9.inter_pic_predicted =
- info->codecSpecific.VP9.inter_pic_predicted;
- rtp->codecHeader.VP9.flexible_mode =
- info->codecSpecific.VP9.flexible_mode;
- rtp->codecHeader.VP9.ss_data_available =
- info->codecSpecific.VP9.ss_data_available;
- rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
- rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
- rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
- rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
- rtp->codecHeader.VP9.temporal_up_switch =
- info->codecSpecific.VP9.temporal_up_switch;
- rtp->codecHeader.VP9.inter_layer_predicted =
- info->codecSpecific.VP9.inter_layer_predicted;
- rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
- rtp->codecHeader.VP9.num_spatial_layers =
- info->codecSpecific.VP9.num_spatial_layers;
-
- if (info->codecSpecific.VP9.ss_data_available) {
- rtp->codecHeader.VP9.spatial_layer_resolution_present =
- info->codecSpecific.VP9.spatial_layer_resolution_present;
- if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
- for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
- ++i) {
- rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
- rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
- }
- }
- rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
- }
-
- rtp->codecHeader.VP9.num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
- for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i)
- rtp->codecHeader.VP9.pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
- return;
- }
- case kVideoCodecH264:
- rtp->codec = kRtpVideoH264;
- return;
- case kVideoCodecGeneric:
- rtp->codec = kRtpVideoGeneric;
- rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
- return;
- default:
- return;
- }
-}
-} // namespace
-
-// #define DEBUG_ENCODER_BIT_STREAM
-
VCMGenericEncoder::VCMGenericEncoder(
VideoEncoder* encoder,
VideoEncoderRateObserver* rate_observer,
VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource)
+ bool internal_source)
: encoder_(encoder),
rate_observer_(rate_observer),
vcm_encoded_frame_callback_(encoded_frame_callback),
- internal_source_(internalSource),
+ internal_source_(internal_source),
encoder_params_({0, 0, 0, 0}),
- rotation_(kVideoRotation_0),
is_screenshare_(false) {}
VCMGenericEncoder::~VCMGenericEncoder() {}
@@ -114,8 +41,8 @@ int32_t VCMGenericEncoder::Release() {
}
int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize) {
+ int32_t number_of_cores,
+ size_t max_payload_size) {
TRACE_EVENT0("webrtc", "VCMGenericEncoder::InitEncode");
{
rtc::CritScope lock(&params_lock_);
@@ -124,7 +51,7 @@ int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
}
is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
- if (encoder_->InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
+ if (encoder_->InitEncode(settings, number_of_cores, max_payload_size) != 0) {
LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
"payload name: "
<< settings->plName;
@@ -134,40 +61,30 @@ int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
return 0;
}
-int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes) {
+int32_t VCMGenericEncoder::Encode(const VideoFrame& frame,
+ const CodecSpecificInfo* codec_specific,
+ const std::vector<FrameType>& frame_types) {
TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
- inputFrame.timestamp());
+ frame.timestamp());
- for (FrameType frame_type : frameTypes)
+ for (FrameType frame_type : frame_types)
RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
- rotation_ = inputFrame.rotation();
-
- // Keep track of the current frame rotation and apply to the output of the
- // encoder. There might not be exact as the encoder could have one frame delay
- // but it should be close enough.
- // TODO(pbos): Map from timestamp, this is racy (even if rotation_ is locked
- // properly, which it isn't). More than one frame may be in the pipeline.
- vcm_encoded_frame_callback_->SetRotation(rotation_);
-
- int32_t result = encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
-
- if (vcm_encoded_frame_callback_) {
- vcm_encoded_frame_callback_->SignalLastEncoderImplementationUsed(
- encoder_->ImplementationName());
- }
+ int32_t result = encoder_->Encode(frame, codec_specific, &frame_types);
if (is_screenshare_ &&
result == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT) {
// Target bitrate exceeded, encoder state has been reset - try again.
- return encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
+ return encoder_->Encode(frame, codec_specific, &frame_types);
}
return result;
}
+const char* VCMGenericEncoder::ImplementationName() const {
+ return encoder_->ImplementationName();
+}
+
void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
bool channel_parameters_have_changed;
bool rates_have_changed;
@@ -186,7 +103,7 @@ void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
if (rates_have_changed) {
uint32_t target_bitrate_kbps = (params.target_bitrate + 500) / 1000;
encoder_->SetRates(target_bitrate_kbps, params.input_frame_rate);
- if (rate_observer_ != nullptr) {
+ if (rate_observer_) {
rate_observer_->OnSetRates(params.target_bitrate,
params.input_frame_rate);
}
@@ -220,93 +137,32 @@ bool VCMGenericEncoder::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
-int VCMGenericEncoder::GetTargetFramerate() {
- return encoder_->GetTargetFramerate();
-}
-
-/***************************
- * Callback Implementation
- ***************************/
VCMEncodedFrameCallback::VCMEncodedFrameCallback(
- EncodedImageCallback* post_encode_callback)
- : send_callback_(),
- _mediaOpt(NULL),
- _payloadType(0),
- _internalSource(false),
- _rotation(kVideoRotation_0),
- post_encode_callback_(post_encode_callback)
-#ifdef DEBUG_ENCODER_BIT_STREAM
- ,
- _bitStreamAfterEncoder(NULL)
-#endif
-{
-#ifdef DEBUG_ENCODER_BIT_STREAM
- _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
-#endif
-}
+ EncodedImageCallback* post_encode_callback,
+ media_optimization::MediaOptimization* media_opt)
+ : internal_source_(false),
+ post_encode_callback_(post_encode_callback),
+ media_opt_(media_opt) {}
-VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {
-#ifdef DEBUG_ENCODER_BIT_STREAM
- fclose(_bitStreamAfterEncoder);
-#endif
-}
-
-int32_t VCMEncodedFrameCallback::SetTransportCallback(
- VCMPacketizationCallback* transport) {
- send_callback_ = transport;
- return VCM_OK;
-}
+VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {}
int32_t VCMEncodedFrameCallback::Encoded(
const EncodedImage& encoded_image,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentationHeader) {
+ const CodecSpecificInfo* codec_specific,
+ const RTPFragmentationHeader* fragmentation_header) {
TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
"timestamp", encoded_image._timeStamp);
- post_encode_callback_->Encoded(encoded_image, NULL, NULL);
-
- if (send_callback_ == NULL) {
- return VCM_UNINITIALIZED;
- }
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- if (_bitStreamAfterEncoder != NULL) {
- fwrite(encoded_image._buffer, 1, encoded_image._length,
- _bitStreamAfterEncoder);
- }
-#endif
-
- RTPVideoHeader rtpVideoHeader;
- memset(&rtpVideoHeader, 0, sizeof(RTPVideoHeader));
- RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
- if (codecSpecificInfo) {
- CopyCodecSpecific(codecSpecificInfo, rtpVideoHeaderPtr);
- }
- rtpVideoHeader.rotation = _rotation;
-
- int32_t callbackReturn = send_callback_->SendData(
- _payloadType, encoded_image, fragmentationHeader, rtpVideoHeaderPtr);
- if (callbackReturn < 0) {
- return callbackReturn;
- }
-
- if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(encoded_image);
- if (_internalSource)
- return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame.
+ int ret_val = post_encode_callback_->Encoded(encoded_image, codec_specific,
+ fragmentation_header);
+ if (ret_val < 0)
+ return ret_val;
+
+ if (media_opt_) {
+ media_opt_->UpdateWithEncodedData(encoded_image);
+ if (internal_source_)
+ return media_opt_->DropFrame(); // Signal to encoder to drop next frame.
}
return VCM_OK;
}
-void VCMEncodedFrameCallback::SetMediaOpt(
- media_optimization::MediaOptimization* mediaOpt) {
- _mediaOpt = mediaOpt;
-}
-
-void VCMEncodedFrameCallback::SignalLastEncoderImplementationUsed(
- const char* implementation_name) {
- if (send_callback_)
- send_callback_->OnEncoderImplementationName(implementation_name);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
index e96d9957815..469f04ded40 100644
--- a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
@@ -33,60 +33,26 @@ struct EncoderParameters {
uint32_t input_frame_rate;
};
-/*************************************/
-/* VCMEncodeFrameCallback class */
-/***********************************/
class VCMEncodedFrameCallback : public EncodedImageCallback {
public:
- explicit VCMEncodedFrameCallback(
- EncodedImageCallback* post_encode_callback);
- virtual ~VCMEncodedFrameCallback();
-
- /*
- * Callback implementation - codec encode complete
- */
- int32_t Encoded(
- const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- const RTPFragmentationHeader* fragmentationHeader = NULL);
- /*
- * Callback implementation - generic encoder encode complete
- */
- int32_t SetTransportCallback(VCMPacketizationCallback* transport);
- /**
- * Set media Optimization
- */
- void SetMediaOpt(media_optimization::MediaOptimization* mediaOpt);
-
- void SetPayloadType(uint8_t payloadType) {
- _payloadType = payloadType;
- }
-
- void SetInternalSource(bool internalSource) {
- _internalSource = internalSource;
- }
-
- void SetRotation(VideoRotation rotation) { _rotation = rotation; }
- void SignalLastEncoderImplementationUsed(
- const char* encoder_implementation_name);
+ VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback,
+ media_optimization::MediaOptimization* media_opt);
+ virtual ~VCMEncodedFrameCallback();
+
+ // Implements EncodedImageCallback.
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific,
+ const RTPFragmentationHeader* fragmentation_header) override;
+ void SetInternalSource(bool internal_source) {
+ internal_source_ = internal_source;
+ }
private:
- VCMPacketizationCallback* send_callback_;
- media_optimization::MediaOptimization* _mediaOpt;
- uint8_t _payloadType;
- bool _internalSource;
- VideoRotation _rotation;
-
- EncodedImageCallback* post_encode_callback_;
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- FILE* _bitStreamAfterEncoder;
-#endif
-}; // end of VCMEncodeFrameCallback class
-
-/******************************/
-/* VCMGenericEncoder class */
-/******************************/
+ bool internal_source_;
+ EncodedImageCallback* const post_encode_callback_;
+ media_optimization::MediaOptimization* const media_opt_;
+};
+
class VCMGenericEncoder {
friend class VCMCodecDataBase;
@@ -94,44 +60,27 @@ class VCMGenericEncoder {
VCMGenericEncoder(VideoEncoder* encoder,
VideoEncoderRateObserver* rate_observer,
VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource);
+ bool internal_source);
~VCMGenericEncoder();
- /**
- * Free encoder memory
- */
int32_t Release();
- /**
- * Initialize the encoder with the information from the VideoCodec
- */
int32_t InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize);
- /**
- * Encode raw image
- * inputFrame : Frame containing raw image
- * codecSpecificInfo : Specific codec data
- * cameraFrameRate : Request or information from the remote side
- * frameType : The requested frame type to encode
- */
- int32_t Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes);
+ int32_t number_of_cores,
+ size_t max_payload_size);
+ int32_t Encode(const VideoFrame& frame,
+ const CodecSpecificInfo* codec_specific,
+ const std::vector<FrameType>& frame_types);
+
+ const char* ImplementationName() const;
void SetEncoderParameters(const EncoderParameters& params);
EncoderParameters GetEncoderParameters() const;
int32_t SetPeriodicKeyFrames(bool enable);
-
int32_t RequestFrame(const std::vector<FrameType>& frame_types);
-
bool InternalSource() const;
-
void OnDroppedFrame();
-
bool SupportsNativeHandle() const;
- int GetTargetFramerate();
-
private:
VideoEncoder* const encoder_;
VideoEncoderRateObserver* const rate_observer_;
@@ -139,9 +88,8 @@ class VCMGenericEncoder {
const bool internal_source_;
rtc::CriticalSection params_lock_;
EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
- VideoRotation rotation_;
bool is_screenshare_;
-}; // end of VCMGenericEncoder class
+};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h b/chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h
deleted file mode 100644
index ec58445ae27..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
-
-#include <functional>
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/rate_statistics.h"
-
-namespace webrtc {
-
-class Clock;
-
-// Certain hardware encoders tend to consistently overshoot the bitrate that
-// they are configured to encode at. This class estimates an adjusted bitrate
-// that when set on the encoder will produce the desired bitrate.
-class BitrateAdjuster {
- public:
- // min_adjusted_bitrate_pct and max_adjusted_bitrate_pct are the lower and
- // upper bound outputted adjusted bitrates as a percentage of the target
- // bitrate.
- BitrateAdjuster(Clock* clock,
- float min_adjusted_bitrate_pct,
- float max_adjusted_bitrate_pct);
- virtual ~BitrateAdjuster() {}
-
- static const uint32_t kBitrateUpdateIntervalMs;
- static const uint32_t kBitrateUpdateFrameInterval;
- static const float kBitrateTolerancePct;
- static const float kBytesPerMsToBitsPerSecond;
-
- // Sets the desired bitrate in bps (bits per second).
- // Should be called at least once before Update.
- void SetTargetBitrateBps(uint32_t bitrate_bps);
- uint32_t GetTargetBitrateBps() const;
-
- // Returns the adjusted bitrate in bps.
- uint32_t GetAdjustedBitrateBps() const;
-
- // Returns what we think the current bitrate is.
- uint32_t GetEstimatedBitrateBps();
-
- // This should be called after each frame is encoded. The timestamp at which
- // it is called is used to estimate the output bitrate of the encoder.
- // Should be called from only one thread.
- void Update(size_t frame_size);
-
- private:
- // Returns true if the bitrate is within kBitrateTolerancePct of bitrate_bps.
- bool IsWithinTolerance(uint32_t bitrate_bps, uint32_t target_bitrate_bps);
-
- // Returns smallest possible adjusted value.
- uint32_t GetMinAdjustedBitrateBps() const EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // Returns largest possible adjusted value.
- uint32_t GetMaxAdjustedBitrateBps() const EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- void Reset();
- void UpdateBitrate(uint32_t current_time_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- rtc::CriticalSection crit_;
- Clock* const clock_;
- const float min_adjusted_bitrate_pct_;
- const float max_adjusted_bitrate_pct_;
- // The bitrate we want.
- volatile uint32_t target_bitrate_bps_ GUARDED_BY(crit_);
- // The bitrate we use to get what we want.
- volatile uint32_t adjusted_bitrate_bps_ GUARDED_BY(crit_);
- // The target bitrate that the adjusted bitrate was computed from.
- volatile uint32_t last_adjusted_target_bitrate_bps_ GUARDED_BY(crit_);
- // Used to estimate bitrate.
- RateStatistics bitrate_tracker_ GUARDED_BY(crit_);
- // The last time we tried to adjust the bitrate.
- uint32_t last_bitrate_update_time_ms_ GUARDED_BY(crit_);
- // The number of frames since the last time we tried to adjust the bitrate.
- uint32_t frames_since_last_update_ GUARDED_BY(crit_);
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h b/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
index 0c508b7739a..0f8567963a0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
+++ b/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
@@ -31,6 +31,10 @@ namespace webrtc {
class Clock;
class EncodedImageCallback;
+// TODO(pbos): Remove VCMQMSettingsCallback completely. This might be done by
+// removing the VCM and use VideoSender/VideoReceiver as a public interface
+// directly.
+class VCMQMSettingsCallback;
class VideoEncoder;
class VideoDecoder;
struct CodecSpecificInfo;
@@ -78,7 +82,8 @@ class VideoCodingModule : public Module {
VideoEncoderRateObserver* encoder_rate_observer,
VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender);
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback);
static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
@@ -183,32 +188,6 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t SetReceiveChannelParameters(int64_t rtt) = 0;
- // Register a transport callback which will be called to deliver the encoded
- // data and
- // side information.
- //
- // Input:
- // - transport : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterTransportCallback(
- VCMPacketizationCallback* transport) = 0;
-
- // Register video output information callback which will be called to deliver
- // information
- // about the video stream produced by the encoder, for instance the average
- // frame rate and
- // bit rate.
- //
- // Input:
- // - outputInformation : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) = 0;
-
// Register a video protection callback which will be called to deliver
// the requested FEC rate and NACK status (on/off).
//
@@ -248,14 +227,13 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t AddVideoFrame(
const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
// Next frame encoded should be an intra frame (keyframe).
//
// Return value : VCM_OK, on success.
// < 0, on error.
- virtual int32_t IntraFrameRequest(int stream_index) = 0;
+ virtual int32_t IntraFrameRequest(size_t stream_index) = 0;
// Frame Dropper enable. Can be used to disable the frame dropping when the
// encoder
@@ -391,10 +369,6 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t Decode(uint16_t maxWaitTimeMs = 200) = 0;
- // Registers a callback which conveys the size of the render buffer.
- virtual int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) = 0;
-
// API to get the codec which is currently used for decoding by the module.
//
// Input:
@@ -511,8 +485,6 @@ class VideoCodingModule : public Module {
// suspended due to bandwidth limitations; otherwise false.
virtual bool VideoSuspended() const = 0;
- virtual void RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) = 0;
virtual void RegisterPostEncodeImageCallback(
EncodedImageCallback* post_encode_callback) = 0;
// Releases pending decode calls, permitting faster thread shutdown.
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h b/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
index 4fe8c797933..ba71803c7c9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
+#include <string>
#include <vector>
#include "webrtc/modules/include/module_common_types.h"
@@ -56,20 +57,6 @@ struct VCMFrameCount {
uint32_t numDeltaFrames;
};
-// Callback class used for sending data ready to be packetized
-class VCMPacketizationCallback {
- public:
- virtual int32_t SendData(uint8_t payloadType,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentationHeader,
- const RTPVideoHeader* rtpVideoHdr) = 0;
-
- virtual void OnEncoderImplementationName(const char* implementation_name) {}
-
- protected:
- virtual ~VCMPacketizationCallback() {}
-};
-
// Callback class used for passing decoded frames which are ready to be
// rendered.
class VCMReceiveCallback {
@@ -86,13 +73,13 @@ class VCMReceiveCallback {
virtual ~VCMReceiveCallback() {}
};
-// Callback class used for informing the user of the bit rate and frame rate
-// produced by the
-// encoder.
+// Callback class used for informing the user of the bit rate and frame rate,
+// and the name of the encoder.
class VCMSendStatisticsCallback {
public:
- virtual int32_t SendStatistics(const uint32_t bitRate,
- const uint32_t frameRate) = 0;
+ virtual void SendStatistics(uint32_t bitRate,
+ uint32_t frameRate,
+ const std::string& encoder_name) = 0;
protected:
virtual ~VCMSendStatisticsCallback() {}
@@ -189,30 +176,6 @@ class KeyFrameRequestSender {
virtual ~KeyFrameRequestSender() {}
};
-// Callback used to inform the user of the the desired resolution
-// as subscribed by Media Optimization (Quality Modes)
-class VCMQMSettingsCallback {
- public:
- virtual int32_t SetVideoQMSettings(const uint32_t frameRate,
- const uint32_t width,
- const uint32_t height) = 0;
-
- virtual void SetTargetFramerate(int frame_rate) = 0;
-
- protected:
- virtual ~VCMQMSettingsCallback() {}
-};
-
-// Callback class used for telling the user about the size (in time) of the
-// render buffer, that is the size in time of the complete continuous frames.
-class VCMRenderBufferSizeCallback {
- public:
- virtual void RenderBufferSizeMs(int buffer_size_ms) = 0;
-
- protected:
- virtual ~VCMRenderBufferSizeCallback() {}
-};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
index f048b0a883c..9c50a945191 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
@@ -601,7 +601,8 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
// Frame pulled out from jitter buffer, update the jitter estimate.
const bool retransmitted = (frame->GetNackCount() > 0);
if (retransmitted) {
- jitter_estimate_.FrameNacked();
+ if (WaitForRetransmissions())
+ jitter_estimate_.FrameNacked();
} else if (frame->Length() > 0) {
// Ignore retransmitted and empty frames.
if (waiting_for_completion_.latest_packet_time >= 0) {
@@ -958,6 +959,8 @@ void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
jitter_estimate_.UpdateRtt(rtt_ms);
if (nack_module_)
nack_module_->UpdateRtt(rtt_ms);
+ if (!WaitForRetransmissions())
+ jitter_estimate_.ResetNackCount();
}
void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
@@ -1194,19 +1197,6 @@ int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
return last_decoded_state_.time_stamp();
}
-void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
- uint32_t* timestamp_end) {
- CriticalSectionScoped cs(crit_sect_);
- CleanUpOldOrEmptyFrames();
- *timestamp_start = 0;
- *timestamp_end = 0;
- if (decodable_frames_.empty()) {
- return;
- }
- *timestamp_start = decodable_frames_.Front()->TimeStamp();
- *timestamp_end = decodable_frames_.Back()->TimeStamp();
-}
-
void VCMJitterBuffer::RegisterStatsCallback(
VCMReceiveStatisticsCallback* callback) {
CriticalSectionScoped cs(crit_sect_);
@@ -1282,9 +1272,13 @@ void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
if (frame.IsSessionComplete()) {
if (frame.FrameType() == kVideoFrameKey) {
++receive_statistics_.key_frames;
+ if (receive_statistics_.key_frames == 1) {
+ LOG(LS_INFO) << "Received first complete key frame";
+ }
} else {
++receive_statistics_.delta_frames;
}
+
if (stats_callback_ != NULL)
stats_callback_->OnFrameCountsUpdated(receive_statistics_);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
index 0cc03dd8107..e36f2cd6d52 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
@@ -212,10 +212,6 @@ class VCMJitterBuffer {
int64_t LastDecodedTimestamp() const;
VCMDecodeErrorMode decode_error_mode() const { return decode_error_mode_; }
- // Used to compute time of complete continuous frames. Returns the timestamps
- // corresponding to the start and end of the continuous complete buffer.
- void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
-
void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
int64_t TimeUntilNextProcess();
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
index df70ea98261..af9c20aaefe 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
@@ -195,7 +195,7 @@ class ProcessThreadMock : public ProcessThread {
MOCK_METHOD1(WakeUp, void(Module* module));
MOCK_METHOD1(RegisterModule, void(Module* module));
MOCK_METHOD1(DeRegisterModule, void(Module* module));
- void PostTask(rtc::scoped_ptr<ProcessTask> task) {}
+ void PostTask(std::unique_ptr<ProcessTask> task) {}
};
class TestBasicJitterBuffer : public ::testing::TestWithParam<std::string>,
@@ -215,7 +215,7 @@ class TestBasicJitterBuffer : public ::testing::TestWithParam<std::string>,
protected:
TestBasicJitterBuffer() : scoped_field_trial_(GetParam()) {}
- virtual void SetUp() {
+ void SetUp() override {
clock_.reset(new SimulatedClock(0));
jitter_buffer_.reset(new VCMJitterBuffer(
clock_.get(),
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
index 69cf757f2b4..42db2facf1c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
@@ -34,19 +34,10 @@ VCMProtectionMethod::VCMProtectionMethod()
_protectionFactorD(0),
_scaleProtKey(2.0f),
_maxPayloadSize(1460),
- _qmRobustness(new VCMQmRobustness()),
- _useUepProtectionK(false),
- _useUepProtectionD(true),
_corrFecCost(1.0),
_type(kNone) {}
-VCMProtectionMethod::~VCMProtectionMethod() {
- delete _qmRobustness;
-}
-void VCMProtectionMethod::UpdateContentMetrics(
- const VideoContentMetrics* contentMetrics) {
- _qmRobustness->UpdateContent(contentMetrics);
-}
+VCMProtectionMethod::~VCMProtectionMethod() {}
VCMNackFecMethod::VCMNackFecMethod(int64_t lowRttNackThresholdMs,
int64_t highRttNackThresholdMs)
@@ -333,17 +324,6 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
codeRateDelta = kPacketLossMax - 1;
}
- float adjustFec = 1.0f;
- // Avoid additional adjustments when layers are active.
- // TODO(mikhal/marco): Update adjusmtent based on layer info.
- if (parameters->numLayers == 1) {
- adjustFec = _qmRobustness->AdjustFecFactor(
- codeRateDelta, parameters->bitRate, parameters->frameRate,
- parameters->rtt, packetLoss);
- }
-
- codeRateDelta = static_cast<uint8_t>(codeRateDelta * adjustFec);
-
// For Key frame:
// Effectively at a higher rate, so we scale/boost the rate
// The boost factor may depend on several factors: ratio of packet
@@ -411,13 +391,6 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
_corrFecCost = 0.0f;
}
- // TODO(marpan): Set the UEP protection on/off for Key and Delta frames
- _useUepProtectionK = _qmRobustness->SetUepProtection(
- codeRateKey, parameters->bitRate, packetLoss, 0);
-
- _useUepProtectionD = _qmRobustness->SetUepProtection(
- codeRateDelta, parameters->bitRate, packetLoss, 1);
-
// DONE WITH FEC PROTECTION SETTINGS
return true;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
index 6b47e3b2d99..ad314aca8c2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
+++ b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
@@ -18,7 +18,6 @@
#include "webrtc/base/exp_filter.h"
#include "webrtc/modules/video_coding/internal_defines.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/typedefs.h"
@@ -45,6 +44,10 @@ enum FilterPacketLossMode {
// common to media optimization and the jitter buffer.
const int64_t kLowRttNackMs = 20;
+// If the RTT is higher than this an extra RTT wont be added to to the jitter
+// buffer delay.
+const int kMaxRttDelayThreshold = 500;
+
struct VCMProtectionParameters {
VCMProtectionParameters()
: rtt(0),
@@ -138,9 +141,6 @@ class VCMProtectionMethod {
virtual int MaxFramesFec() const { return 1; }
- // Updates content metrics
- void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
-
protected:
uint8_t _effectivePacketLoss;
uint8_t _protectionFactorK;
@@ -149,7 +149,6 @@ class VCMProtectionMethod {
float _scaleProtKey;
int32_t _maxPayloadSize;
- VCMQmRobustness* _qmRobustness;
bool _useUepProtectionK;
bool _useUepProtectionD;
float _corrFecCost;
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc b/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
index a234a06f9b4..d5fbadc122f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
@@ -11,8 +11,6 @@
#include "webrtc/modules/video_coding/media_optimization.h"
#include "webrtc/base/logging.h"
-#include "webrtc/modules/video_coding/content_metrics_processing.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -33,13 +31,6 @@ void UpdateProtectionCallback(
// Get the FEC code rate for Delta frames (set to 0 when NA).
delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
- // Get the FEC-UEP protection status for Key frames: UEP on/off.
- key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
-
- // Get the FEC-UEP protection status for Delta frames: UEP on/off.
- delta_fec_params.use_uep_protection =
- selected_method->RequiredUepProtectionD();
-
// The RTP module currently requires the same |max_fec_frames| for both
// key and delta frames.
delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
@@ -88,16 +79,11 @@ MediaOptimization::MediaOptimization(Clock* clock)
max_payload_size_(1460),
video_target_bitrate_(0),
incoming_frame_rate_(0),
- enable_qm_(false),
encoded_frame_samples_(),
avg_sent_bit_rate_bps_(0),
avg_sent_framerate_(0),
key_frame_cnt_(0),
delta_frame_cnt_(0),
- content_(new VCMContentMetricsProcessing()),
- qm_resolution_(new VCMQmResolution()),
- last_qm_update_time_(0),
- last_change_time_(0),
num_layers_(0),
suspension_enabled_(false),
video_suspended_(false),
@@ -120,8 +106,6 @@ void MediaOptimization::Reset() {
frame_dropper_->Reset();
loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
frame_dropper_->SetRates(0, 0);
- content_->Reset();
- qm_resolution_->Reset();
loss_prot_logic_->UpdateFrameRate(incoming_frame_rate_);
loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
send_statistics_zero_encode_ = 0;
@@ -131,8 +115,6 @@ void MediaOptimization::Reset() {
user_frame_rate_ = 0;
key_frame_cnt_ = 0;
delta_frame_cnt_ = 0;
- last_qm_update_time_ = 0;
- last_change_time_ = 0;
encoded_frame_samples_.clear();
avg_sent_bit_rate_bps_ = 0;
num_layers_ = 1;
@@ -160,12 +142,7 @@ void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
int num_layers,
int32_t mtu) {
// Everything codec specific should be reset here since this means the codec
- // has changed. If native dimension values have changed, then either user
- // initiated change, or QM initiated change. Will be able to determine only
- // after the processing of the first frame.
- last_change_time_ = clock_->TimeInMilliseconds();
- content_->Reset();
- content_->UpdateFrameRate(frame_rate);
+ // has changed.
max_bit_rate_ = max_bit_rate;
send_codec_type_ = send_codec_type;
@@ -182,16 +159,13 @@ void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
codec_height_ = height;
num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
max_payload_size_ = mtu;
- qm_resolution_->Initialize(target_bitrate_kbps, user_frame_rate_,
- codec_width_, codec_height_, num_layers_);
}
uint32_t MediaOptimization::SetTargetRates(
uint32_t target_bitrate,
uint8_t fraction_lost,
int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback) {
+ VCMProtectionCallback* protection_callback) {
CriticalSectionScoped lock(crit_sect_.get());
VCMProtectionMethod* selected_method = loss_prot_logic_->SelectedMethod();
float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
@@ -227,11 +201,7 @@ uint32_t MediaOptimization::SetTargetRates(
float protection_overhead_rate = 0.0f;
// Update protection settings, when applicable.
- float sent_video_rate_kbps = 0.0f;
if (loss_prot_logic_->SelectedType() != kNone) {
- // Update protection method with content metrics.
- selected_method->UpdateContentMetrics(content_->ShortTermAvgData());
-
// Update method will compute the robustness settings for the given
// protection method and the overhead cost
// the protection method is set by the user via SetVideoProtection.
@@ -265,7 +235,6 @@ uint32_t MediaOptimization::SetTargetRates(
// Get the effective packet loss for encoder ER when applicable. Should be
// passed to encoder via fraction_lost.
packet_loss_enc = selected_method->RequiredPacketLossER();
- sent_video_rate_kbps = static_cast<float>(sent_video_rate_bps) / 1000.0f;
}
// Source coding rate: total rate - protection overhead.
@@ -281,19 +250,6 @@ uint32_t MediaOptimization::SetTargetRates(
static_cast<float>(video_target_bitrate_) / 1000.0f;
frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
- if (enable_qm_ && qmsettings_callback) {
- // Update QM with rates.
- qm_resolution_->UpdateRates(target_video_bitrate_kbps, sent_video_rate_kbps,
- incoming_frame_rate_, fraction_lost_);
- // Check for QM selection.
- bool select_qm = CheckStatusForQMchange();
- if (select_qm) {
- SelectQuality(qmsettings_callback);
- }
- // Reset the short-term averaged content data.
- content_->ResetShortTermAvgData();
- }
-
CheckSuspendConditions();
return video_target_bitrate_;
@@ -367,11 +323,6 @@ int32_t MediaOptimization::UpdateWithEncodedData(
loss_prot_logic_->UpdatePacketsPerFrameKey(
min_packets_per_frame, clock_->TimeInMilliseconds());
}
-
- if (enable_qm_) {
- // Update quality select with encoded length.
- qm_resolution_->UpdateEncodedSize(encoded_length);
- }
}
if (!delta_frame && encoded_length > 0) {
loss_prot_logic_->UpdateKeyFrameSize(static_cast<float>(encoded_length));
@@ -388,11 +339,6 @@ int32_t MediaOptimization::UpdateWithEncodedData(
return VCM_OK;
}
-void MediaOptimization::EnableQM(bool enable) {
- CriticalSectionScoped lock(crit_sect_.get());
- enable_qm_ = enable;
-}
-
void MediaOptimization::EnableFrameDropper(bool enable) {
CriticalSectionScoped lock(crit_sect_.get());
frame_dropper_->Enable(enable);
@@ -424,19 +370,6 @@ bool MediaOptimization::DropFrame() {
return frame_dropper_->DropFrame();
}
-void MediaOptimization::UpdateContentData(
- const VideoContentMetrics* content_metrics) {
- CriticalSectionScoped lock(crit_sect_.get());
- // Updating content metrics.
- if (content_metrics == NULL) {
- // Disable QM if metrics are NULL.
- enable_qm_ = false;
- qm_resolution_->Reset();
- } else {
- content_->UpdateContentData(content_metrics);
- }
-}
-
void MediaOptimization::UpdateIncomingFrameRate() {
int64_t now = clock_->TimeInMilliseconds();
if (incoming_frame_times_[0] == 0) {
@@ -451,36 +384,6 @@ void MediaOptimization::UpdateIncomingFrameRate() {
ProcessIncomingFrameRate(now);
}
-int32_t MediaOptimization::SelectQuality(
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Reset quantities for QM select.
- qm_resolution_->ResetQM();
-
- // Update QM will long-term averaged content metrics.
- qm_resolution_->UpdateContent(content_->LongTermAvgData());
-
- // Select quality mode.
- VCMResolutionScale* qm = NULL;
- int32_t ret = qm_resolution_->SelectResolution(&qm);
- if (ret < 0) {
- return ret;
- }
-
- // Check for updates to spatial/temporal modes.
- QMUpdate(qm, video_qmsettings_callback);
-
- // Reset all the rate and related frame counters quantities.
- qm_resolution_->ResetRates();
-
- // Reset counters.
- last_qm_update_time_ = clock_->TimeInMilliseconds();
-
- // Reset content metrics.
- content_->Reset();
-
- return VCM_OK;
-}
-
void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
while (!encoded_frame_samples_.empty()) {
if (now_ms - encoded_frame_samples_.front().time_complete_ms >
@@ -527,65 +430,6 @@ void MediaOptimization::UpdateSentFramerate() {
}
}
-bool MediaOptimization::QMUpdate(
- VCMResolutionScale* qm,
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Check for no change.
- if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
- return false;
- }
-
- // Check for change in frame rate.
- if (qm->change_resolution_temporal) {
- incoming_frame_rate_ = qm->frame_rate;
- // Reset frame rate estimate.
- memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
- }
-
- // Check for change in frame size.
- if (qm->change_resolution_spatial) {
- codec_width_ = qm->codec_width;
- codec_height_ = qm->codec_height;
- }
-
- LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
- "to "
- << qm->codec_width << "x" << qm->codec_height << "@"
- << qm->frame_rate;
-
- // Update VPM with new target frame rate and frame size.
- // Note: use |qm->frame_rate| instead of |_incoming_frame_rate| for updating
- // target frame rate in VPM frame dropper. The quantity |_incoming_frame_rate|
- // will vary/fluctuate, and since we don't want to change the state of the
- // VPM frame dropper, unless a temporal action was selected, we use the
- // quantity |qm->frame_rate| for updating.
- video_qmsettings_callback->SetVideoQMSettings(qm->frame_rate, codec_width_,
- codec_height_);
- content_->UpdateFrameRate(qm->frame_rate);
- qm_resolution_->UpdateCodecParameters(qm->frame_rate, codec_width_,
- codec_height_);
- return true;
-}
-
-// Check timing constraints and look for significant change in:
-// (1) scene content,
-// (2) target bit rate.
-bool MediaOptimization::CheckStatusForQMchange() {
- bool status = true;
-
- // Check that we do not call QMSelect too often, and that we waited some time
- // (to sample the metrics) from the event last_change_time
- // last_change_time is the time where user changed the size/rate/frame rate
- // (via SetEncodingData).
- int64_t now = clock_->TimeInMilliseconds();
- if ((now - last_qm_update_time_) < kQmMinIntervalMs ||
- (now - last_change_time_) < kQmMinIntervalMs) {
- status = false;
- }
-
- return status;
-}
-
// Allowing VCM to keep track of incoming frame rate.
void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
int32_t num = 0;
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization.h b/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
index 060cd893ffe..081b2a900a2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
@@ -17,7 +17,6 @@
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/modules/video_coding/media_opt_util.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -59,11 +58,9 @@ class MediaOptimization {
uint32_t SetTargetRates(uint32_t target_bitrate,
uint8_t fraction_lost,
int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback);
+ VCMProtectionCallback* protection_callback);
void SetProtectionMethod(VCMProtectionMethodEnum method);
- void EnableQM(bool enable);
void EnableFrameDropper(bool enable);
// Lets the sender suspend video when the rate drops below
@@ -74,8 +71,6 @@ class MediaOptimization {
bool DropFrame();
- void UpdateContentData(const VideoContentMetrics* content_metrics);
-
// Informs Media Optimization of encoded output.
int32_t UpdateWithEncodedData(const EncodedImage& encoded_image);
@@ -98,19 +93,6 @@ class MediaOptimization {
void UpdateSentBitrate(int64_t now_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
void UpdateSentFramerate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Computes new Quality Mode.
- int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Verifies if QM settings differ from default, i.e. if an update is required.
- // Computes actual values, as will be sent to the encoder.
- bool QMUpdate(VCMResolutionScale* qm,
- VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Checks if we should make a QM change. Return true if yes, false otherwise.
- bool CheckStatusForQMchange() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
void ProcessIncomingFrameRate(int64_t now)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
@@ -152,16 +134,11 @@ class MediaOptimization {
int video_target_bitrate_ GUARDED_BY(crit_sect_);
float incoming_frame_rate_ GUARDED_BY(crit_sect_);
int64_t incoming_frame_times_[kFrameCountHistorySize] GUARDED_BY(crit_sect_);
- bool enable_qm_ GUARDED_BY(crit_sect_);
std::list<EncodedFrameSample> encoded_frame_samples_ GUARDED_BY(crit_sect_);
uint32_t avg_sent_bit_rate_bps_ GUARDED_BY(crit_sect_);
uint32_t avg_sent_framerate_ GUARDED_BY(crit_sect_);
uint32_t key_frame_cnt_ GUARDED_BY(crit_sect_);
uint32_t delta_frame_cnt_ GUARDED_BY(crit_sect_);
- std::unique_ptr<VCMContentMetricsProcessing> content_ GUARDED_BY(crit_sect_);
- std::unique_ptr<VCMQmResolution> qm_resolution_ GUARDED_BY(crit_sect_);
- int64_t last_qm_update_time_ GUARDED_BY(crit_sect_);
- int64_t last_change_time_ GUARDED_BY(crit_sect_); // Content/user triggered.
int num_layers_ GUARDED_BY(crit_sect_);
bool suspension_enabled_ GUARDED_BY(crit_sect_);
bool video_suspended_ GUARDED_BY(crit_sect_);
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
index 3f8ac5d0752..e6a1bcccd96 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
@@ -66,7 +66,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
media_opt_.EnableFrameDropper(true);
for (int time = 0; time < 2000; time += frame_time_ms_) {
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
@@ -76,7 +76,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps - 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to engage immediately and stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -89,7 +89,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps + 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -101,7 +101,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to disengage immediately.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -138,7 +138,7 @@ TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
// Using 10% of codec bitrate for FEC, should still be able to use all of it.
protection_callback.fec_rate_bps_ = kCodecBitrateBps / 10;
uint32_t target_bitrate = media_opt_.SetTargetRates(
- kMaxBitrateBps, 0, 0, &protection_callback, nullptr);
+ kMaxBitrateBps, 0, 0, &protection_callback);
EXPECT_EQ(kCodecBitrateBps, static_cast<int>(target_bitrate));
@@ -146,7 +146,7 @@ TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
// both equally, but only be half of max (since that ceiling should be hit).
protection_callback.fec_rate_bps_ = kCodecBitrateBps;
target_bitrate = media_opt_.SetTargetRates(kMaxBitrateBps, 128, 100,
- &protection_callback, nullptr);
+ &protection_callback);
EXPECT_EQ(kMaxBitrateBps / 2, static_cast<int>(target_bitrate));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
index 1b12afe0f0d..43244321ea0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
@@ -49,17 +49,17 @@ NackModule::NackModule(Clock* clock,
running_(true),
initialized_(false),
rtt_ms_(kDefaultRttMs),
- last_seq_num_(0),
+ newest_seq_num_(0),
next_process_time_ms_(-1) {
RTC_DCHECK(clock_);
RTC_DCHECK(nack_sender_);
RTC_DCHECK(keyframe_request_sender_);
}
-void NackModule::OnReceivedPacket(const VCMPacket& packet) {
+int NackModule::OnReceivedPacket(const VCMPacket& packet) {
rtc::CritScope lock(&crit_);
if (!running_)
- return;
+ return -1;
uint16_t seq_num = packet.seqNum;
// TODO(philipel): When the packet includes information whether it is
// retransmitted or not, use that value instead. For
@@ -69,40 +69,48 @@ void NackModule::OnReceivedPacket(const VCMPacket& packet) {
bool is_keyframe = packet.isFirstPacket && packet.frameType == kVideoFrameKey;
if (!initialized_) {
- last_seq_num_ = seq_num;
+ newest_seq_num_ = seq_num;
if (is_keyframe)
keyframe_list_.insert(seq_num);
initialized_ = true;
- return;
+ return 0;
}
- if (seq_num == last_seq_num_)
- return;
+ // Since the |newest_seq_num_| is a packet we have actually received we know
+ // that packet has never been Nacked.
+ if (seq_num == newest_seq_num_)
+ return 0;
- if (AheadOf(last_seq_num_, seq_num)) {
+ if (AheadOf(newest_seq_num_, seq_num)) {
// An out of order packet has been received.
- nack_list_.erase(seq_num);
+ auto nack_list_it = nack_list_.find(seq_num);
+ int nacks_sent_for_packet = 0;
+ if (nack_list_it != nack_list_.end()) {
+ nacks_sent_for_packet = nack_list_it->second.retries;
+ nack_list_.erase(nack_list_it);
+ }
if (!is_retransmitted)
UpdateReorderingStatistics(seq_num);
- return;
- } else {
- AddPacketsToNack(last_seq_num_ + 1, seq_num);
- last_seq_num_ = seq_num;
+ return nacks_sent_for_packet;
+ }
+ AddPacketsToNack(newest_seq_num_ + 1, seq_num);
+ newest_seq_num_ = seq_num;
- // Keep track of new keyframes.
- if (is_keyframe)
- keyframe_list_.insert(seq_num);
+ // Keep track of new keyframes.
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
- // And remove old ones so we don't accumulate keyframes.
- auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
- if (it != keyframe_list_.begin())
- keyframe_list_.erase(keyframe_list_.begin(), it);
+ // And remove old ones so we don't accumulate keyframes.
+ auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != keyframe_list_.begin())
+ keyframe_list_.erase(keyframe_list_.begin(), it);
- // Are there any nacks that are waiting for this seq_num.
- std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
- if (!nack_batch.empty())
- nack_sender_->SendNack(nack_batch);
- }
+ // Are there any nacks that are waiting for this seq_num.
+ std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
+ if (!nack_batch.empty())
+ nack_sender_->SendNack(nack_batch);
+
+ return 0;
}
void NackModule::ClearUpTo(uint16_t seq_num) {
@@ -215,7 +223,7 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
auto it = nack_list_.begin();
while (it != nack_list_.end()) {
if (consider_seq_num && it->second.sent_at_time == -1 &&
- AheadOrAt(last_seq_num_, it->second.send_at_seq_num)) {
+ AheadOrAt(newest_seq_num_, it->second.send_at_seq_num)) {
nack_batch.emplace_back(it->second.seq_num);
++it->second.retries;
it->second.sent_at_time = now_ms;
@@ -248,8 +256,8 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
}
void NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
- RTC_DCHECK(AheadOf(last_seq_num_, seq_num));
- uint16_t diff = ReverseDiff(last_seq_num_, seq_num);
+ RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
+ uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
reordering_histogram_.Add(diff);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.h b/chromium/third_party/webrtc/modules/video_coding/nack_module.h
index 7163a8e9054..58d6cfa985e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.h
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module.h
@@ -32,7 +32,7 @@ class NackModule : public Module {
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender);
- void OnReceivedPacket(const VCMPacket& packet);
+ int OnReceivedPacket(const VCMPacket& packet);
void ClearUpTo(uint16_t seq_num);
void UpdateRtt(int64_t rtt_ms);
void Clear();
@@ -59,11 +59,6 @@ class NackModule : public Module {
int64_t sent_at_time;
int retries;
};
-
- struct SeqNumComparator {
- bool operator()(uint16_t s1, uint16_t s2) const { return AheadOf(s2, s1); }
- };
-
void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end)
EXCLUSIVE_LOCKS_REQUIRED(crit_);
@@ -87,13 +82,15 @@ class NackModule : public Module {
NackSender* const nack_sender_;
KeyFrameRequestSender* const keyframe_request_sender_;
- std::map<uint16_t, NackInfo, SeqNumComparator> nack_list_ GUARDED_BY(crit_);
- std::set<uint16_t, SeqNumComparator> keyframe_list_ GUARDED_BY(crit_);
+ std::map<uint16_t, NackInfo, DescendingSeqNumComp<uint16_t>> nack_list_
+ GUARDED_BY(crit_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> keyframe_list_
+ GUARDED_BY(crit_);
video_coding::Histogram reordering_histogram_ GUARDED_BY(crit_);
bool running_ GUARDED_BY(crit_);
bool initialized_ GUARDED_BY(crit_);
int64_t rtt_ms_ GUARDED_BY(crit_);
- uint16_t last_seq_num_ GUARDED_BY(crit_);
+ uint16_t newest_seq_num_ GUARDED_BY(crit_);
int64_t next_process_time_ms_ GUARDED_BY(crit_);
};
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
index 3870742016a..9c2eb4ac0c3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
@@ -290,4 +290,28 @@ TEST_F(TestNackModule, ClearUpToWrap) {
EXPECT_EQ(0, sent_nacks_[0]);
}
+TEST_F(TestNackModule, PacketNackCount) {
+ VCMPacket packet;
+ packet.seqNum = 0;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 2;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 1;
+ EXPECT_EQ(1, nack_module_.OnReceivedPacket(packet));
+
+ sent_nacks_.clear();
+ nack_module_.UpdateRtt(100);
+ packet.seqNum = 5;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module_.Process();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module_.Process();
+ packet.seqNum = 3;
+ EXPECT_EQ(3, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 4;
+ EXPECT_EQ(3, nack_module_.OnReceivedPacket(packet));
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
index 0a05baa16ab..09fb2499074 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
@@ -14,8 +14,8 @@
#include <limits>
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#include "webrtc/modules/video_coding/frame_object.h"
-#include "webrtc/modules/video_coding/sequence_number_util.h"
namespace webrtc {
namespace video_coding {
@@ -25,12 +25,12 @@ PacketBuffer::PacketBuffer(size_t start_buffer_size,
OnCompleteFrameCallback* frame_callback)
: size_(start_buffer_size),
max_size_(max_buffer_size),
- last_seq_num_(0),
first_seq_num_(0),
- initialized_(false),
+ last_seq_num_(0),
+ first_packet_received_(false),
data_buffer_(start_buffer_size),
sequence_buffer_(start_buffer_size),
- frame_callback_(frame_callback) {
+ reference_finder_(frame_callback) {
RTC_DCHECK_LE(start_buffer_size, max_buffer_size);
// Buffer size must always be a power of 2.
RTC_DCHECK((start_buffer_size & (start_buffer_size - 1)) == 0);
@@ -40,12 +40,12 @@ PacketBuffer::PacketBuffer(size_t start_buffer_size,
bool PacketBuffer::InsertPacket(const VCMPacket& packet) {
rtc::CritScope lock(&crit_);
uint16_t seq_num = packet.seqNum;
- int index = seq_num % size_;
+ size_t index = seq_num % size_;
- if (!initialized_) {
+ if (!first_packet_received_) {
first_seq_num_ = seq_num - 1;
last_seq_num_ = seq_num;
- initialized_ = true;
+ first_packet_received_ = true;
}
if (sequence_buffer_[index].used) {
@@ -70,16 +70,17 @@ bool PacketBuffer::InsertPacket(const VCMPacket& packet) {
sequence_buffer_[index].frame_end = packet.markerBit;
sequence_buffer_[index].seq_num = packet.seqNum;
sequence_buffer_[index].continuous = false;
+ sequence_buffer_[index].frame_created = false;
sequence_buffer_[index].used = true;
data_buffer_[index] = packet;
- FindCompleteFrames(seq_num);
+ FindFrames(seq_num);
return true;
}
void PacketBuffer::ClearTo(uint16_t seq_num) {
rtc::CritScope lock(&crit_);
- int index = first_seq_num_ % size_;
+ size_t index = first_seq_num_ % size_;
while (AheadOf<uint16_t>(seq_num, first_seq_num_ + 1)) {
index = (index + 1) % size_;
first_seq_num_ = Add<1 << 16>(first_seq_num_, 1);
@@ -96,7 +97,7 @@ bool PacketBuffer::ExpandBufferSize() {
std::vector<ContinuityInfo> new_sequence_buffer(new_size);
for (size_t i = 0; i < size_; ++i) {
if (sequence_buffer_[i].used) {
- int index = sequence_buffer_[i].seq_num % new_size;
+ size_t index = sequence_buffer_[i].seq_num % new_size;
new_sequence_buffer[index] = sequence_buffer_[i];
new_data_buffer[index] = data_buffer_[i];
}
@@ -108,38 +109,47 @@ bool PacketBuffer::ExpandBufferSize() {
}
bool PacketBuffer::IsContinuous(uint16_t seq_num) const {
- int index = seq_num % size_;
+ size_t index = seq_num % size_;
int prev_index = index > 0 ? index - 1 : size_ - 1;
+
if (!sequence_buffer_[index].used)
return false;
+ if (sequence_buffer_[index].frame_created)
+ return false;
if (sequence_buffer_[index].frame_begin)
return true;
if (!sequence_buffer_[prev_index].used)
return false;
+ if (sequence_buffer_[prev_index].seq_num !=
+ static_cast<uint16_t>(seq_num - 1))
+ return false;
if (sequence_buffer_[prev_index].continuous)
return true;
return false;
}
-void PacketBuffer::FindCompleteFrames(uint16_t seq_num) {
- int index = seq_num % size_;
+void PacketBuffer::FindFrames(uint16_t seq_num) {
+ size_t index = seq_num % size_;
while (IsContinuous(seq_num)) {
sequence_buffer_[index].continuous = true;
- // If the frame is complete, find the first packet of the frame and
- // create a FrameObject.
+ // If all packets of the frame is continuous, find the first packet of the
+ // frame and create an RtpFrameObject.
if (sequence_buffer_[index].frame_end) {
- int rindex = index;
+ int start_index = index;
uint16_t start_seq_num = seq_num;
- while (!sequence_buffer_[rindex].frame_begin) {
- rindex = rindex > 0 ? rindex - 1 : size_ - 1;
+
+ while (!sequence_buffer_[start_index].frame_begin) {
+ sequence_buffer_[start_index].frame_created = true;
+ start_index = start_index > 0 ? start_index - 1 : size_ - 1;
start_seq_num--;
}
+ sequence_buffer_[start_index].frame_created = true;
- std::unique_ptr<FrameObject> frame(
- new RtpFrameObject(this, 1, start_seq_num, seq_num));
- frame_callback_->OnCompleteFrame(std::move(frame));
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(this, start_seq_num, seq_num));
+ reference_finder_.ManageFrame(std::move(frame));
}
index = (index + 1) % size_;
@@ -149,14 +159,13 @@ void PacketBuffer::FindCompleteFrames(uint16_t seq_num) {
void PacketBuffer::ReturnFrame(RtpFrameObject* frame) {
rtc::CritScope lock(&crit_);
- int index = frame->first_packet() % size_;
- int end = (frame->last_packet() + 1) % size_;
- uint16_t seq_num = frame->first_packet();
+ size_t index = frame->first_seq_num() % size_;
+ size_t end = (frame->last_seq_num() + 1) % size_;
+ uint16_t seq_num = frame->first_seq_num();
while (index != end) {
- if (sequence_buffer_[index].seq_num == seq_num) {
+ if (sequence_buffer_[index].seq_num == seq_num)
sequence_buffer_[index].used = false;
- sequence_buffer_[index].continuous = false;
- }
+
index = (index + 1) % size_;
++seq_num;
}
@@ -173,9 +182,9 @@ bool PacketBuffer::GetBitstream(const RtpFrameObject& frame,
uint8_t* destination) {
rtc::CritScope lock(&crit_);
- int index = frame.first_packet() % size_;
- int end = (frame.last_packet() + 1) % size_;
- uint16_t seq_num = frame.first_packet();
+ size_t index = frame.first_seq_num() % size_;
+ size_t end = (frame.last_seq_num() + 1) % size_;
+ uint16_t seq_num = frame.first_seq_num();
while (index != end) {
if (!sequence_buffer_[index].used ||
sequence_buffer_[index].seq_num != seq_num) {
@@ -192,12 +201,22 @@ bool PacketBuffer::GetBitstream(const RtpFrameObject& frame,
return true;
}
-void PacketBuffer::Flush() {
+VCMPacket* PacketBuffer::GetPacket(uint16_t seq_num) {
rtc::CritScope lock(&crit_);
- for (size_t i = 0; i < size_; ++i) {
- sequence_buffer_[i].used = false;
- sequence_buffer_[i].continuous = false;
+ size_t index = seq_num % size_;
+ if (!sequence_buffer_[index].used ||
+ seq_num != sequence_buffer_[index].seq_num) {
+ return nullptr;
}
+ return &data_buffer_[index];
+}
+
+void PacketBuffer::Clear() {
+ rtc::CritScope lock(&crit_);
+ for (size_t i = 0; i < size_; ++i)
+ sequence_buffer_[i].used = false;
+
+ first_packet_received_ = false;
}
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
index 6ca514536ef..ae0916a75f3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
@@ -14,9 +14,11 @@
#include <vector>
#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/rtp_frame_reference_finder.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
namespace webrtc {
namespace video_coding {
@@ -39,25 +41,50 @@ class PacketBuffer {
bool InsertPacket(const VCMPacket& packet);
void ClearTo(uint16_t seq_num);
- void Flush();
+ void Clear();
private:
friend RtpFrameObject;
// Since we want the packet buffer to be as packet type agnostic
// as possible we extract only the information needed in order
- // to determin whether a sequence of packets is continuous or not.
+ // to determine whether a sequence of packets is continuous or not.
struct ContinuityInfo {
+ // The sequence number of the packet.
uint16_t seq_num = 0;
+
+ // If this is the first packet of the frame.
bool frame_begin = false;
+
+ // If this is the last packet of the frame.
bool frame_end = false;
+
+ // If this slot is currently used.
bool used = false;
+
+ // If all its previous packets have been inserted into the packet buffer.
bool continuous = false;
+
+ // If this packet has been used to create a frame already.
+ bool frame_created = false;
};
+ // Tries to expand the buffer.
bool ExpandBufferSize() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Test if all previous packets has arrived for the given sequence number.
bool IsContinuous(uint16_t seq_num) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void FindCompleteFrames(uint16_t seq_num) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Test if all packets of a frame has arrived, and if so, creates a frame.
+ // May create multiple frames per invocation.
+ void FindFrames(uint16_t seq_num) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Copy the bitstream for |frame| to |destination|.
bool GetBitstream(const RtpFrameObject& frame, uint8_t* destination);
+
+ // Get the packet with sequence number |seq_num|.
+ VCMPacket* GetPacket(uint16_t seq_num);
+
+ // Mark all slots used by |frame| as not used.
void ReturnFrame(RtpFrameObject* frame);
rtc::CriticalSection crit_;
@@ -66,13 +93,25 @@ class PacketBuffer {
size_t size_ GUARDED_BY(crit_);
const size_t max_size_;
- uint16_t last_seq_num_ GUARDED_BY(crit_);
+ // The fist sequence number currently in the buffer.
uint16_t first_seq_num_ GUARDED_BY(crit_);
- bool initialized_ GUARDED_BY(crit_);
+
+ // The last sequence number currently in the buffer.
+ uint16_t last_seq_num_ GUARDED_BY(crit_);
+
+ // If the packet buffer has received its first packet.
+ bool first_packet_received_ GUARDED_BY(crit_);
+
+ // Buffer that holds the inserted packets.
std::vector<VCMPacket> data_buffer_ GUARDED_BY(crit_);
+
+ // Buffer that holds the information about which slot that is currently in use
+ // and information needed to determine the continuity between packets.
std::vector<ContinuityInfo> sequence_buffer_ GUARDED_BY(crit_);
- OnCompleteFrameCallback* const frame_callback_;
+ // Frames that have received all their packets are handed off to the
+ // |reference_finder_| which finds the dependencies between the frames.
+ RtpFrameReferenceFinder reference_finder_;
};
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
index bc06940391c..b50074d8f60 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
@@ -10,6 +10,9 @@
#include <cstring>
#include <limits>
+#include <map>
+#include <set>
+#include <utility>
#include "webrtc/modules/video_coding/frame_object.h"
#include "webrtc/modules/video_coding/packet_buffer.h"
@@ -24,142 +27,301 @@ class TestPacketBuffer : public ::testing::Test,
public OnCompleteFrameCallback {
protected:
TestPacketBuffer()
- : rand_(0x8739211), packet_buffer_(kStartSize, kMaxSize, this) {}
+ : rand_(0x8739211),
+ packet_buffer_(new PacketBuffer(kStartSize, kMaxSize, this)),
+ frames_from_callback_(FrameComp()) {}
uint16_t Rand() { return rand_.Rand(std::numeric_limits<uint16_t>::max()); }
void OnCompleteFrame(std::unique_ptr<FrameObject> frame) override {
- frames_from_callback_.emplace_back(std::move(frame));
+ uint16_t pid = frame->picture_id;
+ uint16_t sidx = frame->spatial_layer;
+ auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
+ if (frame_it != frames_from_callback_.end()) {
+ ADD_FAILURE() << "Already received frame with (pid:sidx): ("
+ << pid << ":" << sidx << ")";
+ return;
+ }
+
+ frames_from_callback_.insert(
+ std::make_pair(std::make_pair(pid, sidx), std::move(frame)));
}
void TearDown() override {
- // All FrameObjects must be destroyed before the PacketBuffer since
- // a FrameObject will try to remove itself from the packet buffer
+ // All frame objects must be destroyed before the packet buffer since
+ // a frame object will try to remove itself from the packet buffer
// upon destruction.
frames_from_callback_.clear();
}
+ // Short version of true and false.
+ enum {
+ kT = true,
+ kF = false
+ };
+
+ // Insert a generic packet into the packet buffer.
+ void InsertGeneric(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecGeneric;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp8 packet into the packet buffer.
+ void InsertVp8(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool sync = false, // is sync frame
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP8;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = tid;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = sync;
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp9 packet into the packet buffer.
+ void InsertVp9Gof(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool up = false, // frame is up-switch point
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t sid = kNoSpatialIdx, // spatial id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ GofInfoVP9* ss = nullptr, // scalability structure
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP9;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet.codecSpecificHeader.codecHeader.VP9.picture_id = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_idx = tid;
+ packet.codecSpecificHeader.codecHeader.VP9.spatial_idx = sid;
+ packet.codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_up_switch = up;
+ if (ss != nullptr) {
+ packet.codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet.codecSpecificHeader.codecHeader.VP9.gof = *ss;
+ }
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp9 packet into the packet buffer.
+ void InsertVp9Flex(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool inter, // depends on S-1 layer
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t sid = kNoSpatialIdx, // spatial id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ std::vector<uint8_t> refs =
+ std::vector<uint8_t>(), // frame references
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP9;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP9.inter_layer_predicted = inter;
+ packet.codecSpecificHeader.codecHeader.VP9.flexible_mode = true;
+ packet.codecSpecificHeader.codecHeader.VP9.picture_id = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_idx = tid;
+ packet.codecSpecificHeader.codecHeader.VP9.spatial_idx = sid;
+ packet.codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP9.num_ref_pics = refs.size();
+ for (size_t i = 0; i < refs.size(); ++i)
+ packet.codecSpecificHeader.codecHeader.VP9.pid_diff[i] = refs[i];
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Check if a frame with picture id |pid| and spatial index |sidx| has been
+ // delivered from the packet buffer, and if so, if it has the references
+ // specified by |refs|.
+ template <typename... T>
+ void CheckReferences(uint16_t pid, uint16_t sidx, T... refs) const {
+ auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
+ if (frame_it == frames_from_callback_.end()) {
+ ADD_FAILURE() << "Could not find frame with (pid:sidx): ("
+ << pid << ":" << sidx << ")";
+ return;
+ }
+
+ std::set<uint16_t> actual_refs;
+ for (uint8_t r = 0; r < frame_it->second->num_references; ++r) {
+ actual_refs.insert(frame_it->second->references[r]);
+ }
+
+ std::set<uint16_t> expected_refs;
+ RefsToSet(&expected_refs, refs...);
+
+ ASSERT_EQ(expected_refs, actual_refs);
+ }
+
+ template <typename... T>
+ void CheckReferencesGeneric(uint16_t pid, T... refs) const {
+ CheckReferences(pid, 0, refs...);
+ }
+
+ template <typename... T>
+ void CheckReferencesVp8(uint16_t pid, T... refs) const {
+ CheckReferences(pid, 0, refs...);
+ }
+
+ template <typename... T>
+ void CheckReferencesVp9(uint16_t pid, uint8_t sidx, T... refs) const {
+ CheckReferences(pid, sidx, refs...);
+ }
+
+ template <typename... T>
+ void RefsToSet(std::set<uint16_t>* m, uint16_t ref, T... refs) const {
+ m->insert(ref);
+ RefsToSet(m, refs...);
+ }
+
+ void RefsToSet(std::set<uint16_t>* m) const {}
+
const int kStartSize = 16;
const int kMaxSize = 64;
Random rand_;
- PacketBuffer packet_buffer_;
- std::vector<std::unique_ptr<FrameObject>> frames_from_callback_;
+ std::unique_ptr<PacketBuffer> packet_buffer_;
+ struct FrameComp {
+ bool operator()(const std::pair<uint16_t, uint8_t> f1,
+ const std::pair<uint16_t, uint8_t> f2) const {
+ if (f1.first == f2.first)
+ return f1.second < f2.second;
+ return f1.first < f2.first;
+ }
+ };
+ std::map<std::pair<uint16_t, uint8_t>,
+ std::unique_ptr<FrameObject>,
+ FrameComp> frames_from_callback_;
};
TEST_F(TestPacketBuffer, InsertOnePacket) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, InsertMultiplePackets) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, InsertDuplicatePacket) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, ExpandBuffer) {
- VCMPacket packet;
- packet.seqNum = Rand();
+ uint16_t seq_num = Rand();
for (int i = 0; i < kStartSize + 1; ++i) {
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + i, kT , kT, kT);
}
}
TEST_F(TestPacketBuffer, ExpandBufferOverflow) {
- VCMPacket packet;
- packet.seqNum = Rand();
+ uint16_t seq_num = Rand();
for (int i = 0; i < kMaxSize; ++i) {
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + i, kT, kT , kT);
}
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
+ VCMPacket packet;
+ packet.seqNum = seq_num + kMaxSize + 1;
+ packet.sizeBytes = 1;
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
}
-TEST_F(TestPacketBuffer, OnePacketOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(1UL, frames_from_callback_.size());
+TEST_F(TestPacketBuffer, GenericOnePacketOneFrame) {
+ // seq_num, kf, frst, lst
+ InsertGeneric(Rand() , kT, kT , kT);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, TwoPacketsTwoFrames) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, GenericTwoPacketsTwoFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kT);
+ InsertGeneric(seq_num + 1, kT, kT , kT);
+
EXPECT_EQ(2UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, TwoPacketsOneFrames) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.markerBit = true;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(1UL, frames_from_callback_.size());
-}
+TEST_F(TestPacketBuffer, GenericTwoPacketsOneFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kT, kF , kT);
-TEST_F(TestPacketBuffer, ThreePacketReorderingOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- packet.markerBit = true;
- packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.markerBit = false;
- packet.seqNum -= 1;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
EXPECT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, IndexWrapOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = kStartSize - 1;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.markerBit = true;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, GenericThreePacketReorderingOneFrame) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 2, kT, kF , kT);
+ InsertGeneric(seq_num + 1, kT, kF , kF);
+
EXPECT_EQ(1UL, frames_from_callback_.size());
}
@@ -167,45 +329,77 @@ TEST_F(TestPacketBuffer, DiscardOldPacket) {
uint16_t seq_num = Rand();
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
for (int i = 3; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
++packet.seqNum;
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
- packet_buffer_.ClearTo(seq_num + 1);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
+ packet_buffer_->ClearTo(seq_num + 1);
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, DiscardMultipleOldPackets) {
uint16_t seq_num = Rand();
VCMPacket packet;
packet.seqNum = seq_num;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
for (int i = 3; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
- packet_buffer_.ClearTo(seq_num + 15);
+ packet_buffer_->ClearTo(seq_num + 15);
for (int i = 0; i < 15; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
for (int i = 15; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
}
}
+TEST_F(TestPacketBuffer, GenericFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf , first, last
+ InsertGeneric(seq_num , true , true , true);
+ InsertGeneric(seq_num + 1, false, true , true);
+ InsertGeneric(seq_num + 2, false, true , true);
+ InsertGeneric(seq_num + 3, false, true , true);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesGeneric(seq_num);
+ CheckReferencesGeneric(seq_num + 1, seq_num);
+ CheckReferencesGeneric(seq_num + 2, seq_num + 1);
+ CheckReferencesGeneric(seq_num + 3, seq_num + 2);
+}
+
+TEST_F(TestPacketBuffer, GenericFramesReordered) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf , first, last
+ InsertGeneric(seq_num + 1, false, true , true);
+ InsertGeneric(seq_num , true , true , true);
+ InsertGeneric(seq_num + 3, false, true , true);
+ InsertGeneric(seq_num + 2, false, true , true);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesGeneric(seq_num);
+ CheckReferencesGeneric(seq_num + 1, seq_num);
+ CheckReferencesGeneric(seq_num + 2, seq_num + 1);
+ CheckReferencesGeneric(seq_num + 3, seq_num + 2);
+}
+
TEST_F(TestPacketBuffer, GetBitstreamFromFrame) {
// "many bitstream, such data" with null termination.
uint8_t many[] = {0x6d, 0x61, 0x6e, 0x79, 0x20};
@@ -216,89 +410,997 @@ TEST_F(TestPacketBuffer, GetBitstreamFromFrame) {
uint8_t
result[sizeof(many) + sizeof(bitstream) + sizeof(such) + sizeof(data)];
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = 0xfffe;
- packet.dataPtr = many;
- packet.sizeBytes = sizeof(many);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.isFirstPacket = false;
- ++packet.seqNum;
- packet.dataPtr = bitstream;
- packet.sizeBytes = sizeof(bitstream);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- packet.dataPtr = such;
- packet.sizeBytes = sizeof(such);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.markerBit = true;
- ++packet.seqNum;
- packet.dataPtr = data;
- packet.sizeBytes = sizeof(data);
- EXPECT_EQ(0UL, frames_from_callback_.size());
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ASSERT_EQ(1UL, frames_from_callback_.size());
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, data_size , data
+ InsertGeneric(seq_num , kT, kT , kF , sizeof(many) , many);
+ InsertGeneric(seq_num + 1, kF, kF , kF , sizeof(bitstream), bitstream);
+ InsertGeneric(seq_num + 2, kF, kF , kF , sizeof(such) , such);
+ InsertGeneric(seq_num + 3, kF, kF , kT , sizeof(data) , data);
- EXPECT_TRUE(frames_from_callback_[0]->GetBitstream(result));
- EXPECT_EQ(
- std::strcmp("many bitstream, such data", reinterpret_cast<char*>(result)),
- 0);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 3);
+ EXPECT_TRUE(frames_from_callback_[std::make_pair(seq_num + 3, 0)]->
+ GetBitstream(result));
+ EXPECT_EQ(std::strcmp("many bitstream, such data",
+ reinterpret_cast<char*>(result)),
+ 0);
}
TEST_F(TestPacketBuffer, FreeSlotsOnFrameDestruction) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- packet.markerBit = true;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
EXPECT_EQ(1UL, frames_from_callback_.size());
frames_from_callback_.clear();
- packet.isFirstPacket = true;
- packet.markerBit = false;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- packet.markerBit = true;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
EXPECT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, Flush) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet_buffer_.Flush();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, Clear) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
+ EXPECT_EQ(1UL, frames_from_callback_.size());
+
+ packet_buffer_->Clear();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + kStartSize , kT, kT , kF);
+ InsertGeneric(seq_num + kStartSize + 1, kF, kF , kF);
+ InsertGeneric(seq_num + kStartSize + 2, kF, kF , kT);
EXPECT_EQ(2UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, InvalidateFrameByFlushing) {
+TEST_F(TestPacketBuffer, InvalidateFrameByClearing) {
VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
+ packet.codec = kVideoCodecGeneric;
+ packet.frameType = kVideoFrameKey;
+ packet.isFirstPacket = kT;
+ packet.markerBit = kT;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
ASSERT_EQ(1UL, frames_from_callback_.size());
- packet_buffer_.Flush();
- EXPECT_FALSE(frames_from_callback_[0]->GetBitstream(nullptr));
+ packet_buffer_->Clear();
+ EXPECT_FALSE(frames_from_callback_.begin()->second->GetBitstream(nullptr));
+}
+
+TEST_F(TestPacketBuffer, Vp8NoPictureId) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertVp8(seq_num , kT, kT , kF);
+ InsertVp8(seq_num + 1 , kF, kF , kF);
+ InsertVp8(seq_num + 2 , kF, kF , kT);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 3 , kF, kT , kF);
+ InsertVp8(seq_num + 4 , kF, kF , kT);
+ ASSERT_EQ(2UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 5 , kF, kT , kF);
+ InsertVp8(seq_num + 6 , kF, kF , kF);
+ InsertVp8(seq_num + 7 , kF, kF , kF);
+ InsertVp8(seq_num + 8 , kF, kF , kT);
+ ASSERT_EQ(3UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 9 , kF, kT , kT);
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 10, kF, kT , kF);
+ InsertVp8(seq_num + 11, kF, kF , kT);
+ ASSERT_EQ(5UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 12, kT, kT , kT);
+ ASSERT_EQ(6UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 13, kF, kT , kF);
+ InsertVp8(seq_num + 14, kF, kF , kF);
+ InsertVp8(seq_num + 15, kF, kF , kF);
+ InsertVp8(seq_num + 16, kF, kF , kF);
+ InsertVp8(seq_num + 17, kF, kF , kT);
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 18, kF, kT , kT);
+ ASSERT_EQ(8UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 19, kF, kT , kF);
+ InsertVp8(seq_num + 20, kF, kF , kT);
+ ASSERT_EQ(9UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 21, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 2);
+ CheckReferencesVp8(seq_num + 4, seq_num + 2);
+ CheckReferencesVp8(seq_num + 8, seq_num + 4);
+ CheckReferencesVp8(seq_num + 9, seq_num + 8);
+ CheckReferencesVp8(seq_num + 11, seq_num + 9);
+ CheckReferencesVp8(seq_num + 12);
+ CheckReferencesVp8(seq_num + 17, seq_num + 12);
+ CheckReferencesVp8(seq_num + 18, seq_num + 17);
+ CheckReferencesVp8(seq_num + 20, seq_num + 18);
+ CheckReferencesVp8(seq_num + 21, seq_num + 20);
+}
+
+TEST_F(TestPacketBuffer, Vp8NoPictureIdReordered) {
+ uint16_t seq_num = 0xfffa;
+
+ // seq_num , kf, frst, lst
+ InsertVp8(seq_num + 1 , kF, kF , kF);
+ InsertVp8(seq_num , kT, kT , kF);
+ InsertVp8(seq_num + 2 , kF, kF , kT);
+ InsertVp8(seq_num + 4 , kF, kF , kT);
+ InsertVp8(seq_num + 6 , kF, kF , kF);
+ InsertVp8(seq_num + 3 , kF, kT , kF);
+ InsertVp8(seq_num + 7 , kF, kF , kF);
+ InsertVp8(seq_num + 5 , kF, kT , kF);
+ InsertVp8(seq_num + 9 , kF, kT , kT);
+ InsertVp8(seq_num + 10, kF, kT , kF);
+ InsertVp8(seq_num + 8 , kF, kF , kT);
+ InsertVp8(seq_num + 13, kF, kT , kF);
+ InsertVp8(seq_num + 14, kF, kF , kF);
+ InsertVp8(seq_num + 12, kT, kT , kT);
+ InsertVp8(seq_num + 11, kF, kF , kT);
+ InsertVp8(seq_num + 16, kF, kF , kF);
+ InsertVp8(seq_num + 19, kF, kT , kF);
+ InsertVp8(seq_num + 15, kF, kF , kF);
+ InsertVp8(seq_num + 17, kF, kF , kT);
+ InsertVp8(seq_num + 20, kF, kF , kT);
+ InsertVp8(seq_num + 21, kF, kT , kT);
+ InsertVp8(seq_num + 18, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 2);
+ CheckReferencesVp8(seq_num + 4, seq_num + 2);
+ CheckReferencesVp8(seq_num + 8, seq_num + 4);
+ CheckReferencesVp8(seq_num + 9, seq_num + 8);
+ CheckReferencesVp8(seq_num + 11, seq_num + 9);
+ CheckReferencesVp8(seq_num + 12);
+ CheckReferencesVp8(seq_num + 17, seq_num + 12);
+ CheckReferencesVp8(seq_num + 18, seq_num + 17);
+ CheckReferencesVp8(seq_num + 20, seq_num + 18);
+ CheckReferencesVp8(seq_num + 21, seq_num + 20);
+}
+
+
+TEST_F(TestPacketBuffer, Vp8KeyFrameReferences) {
+ uint16_t pid = Rand();
+ // seq_num, kf, frst, lst, sync, pid, tid, tl0
+ InsertVp8(Rand() , kT, kT , kT , kF , pid, 0 , 0);
+
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+}
+
+// Test with 1 temporal layer.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_0) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 1);
+ InsertVp8(seq_num + 1, kF, kT , kT , kF , pid + 1, 0 , 2);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 3);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 0 , 4);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+}
+
+// Test with 1 temporal layer.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_0) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 1);
+ InsertVp8(seq_num + 1, kF, kT , kT , kF , pid + 1, 0 , 2);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 0 , 4);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 3);
+ InsertVp8(seq_num + 5, kF, kT , kT , kF , pid + 5, 0 , 6);
+ InsertVp8(seq_num + 6, kF, kT , kT , kF , pid + 6, 0 , 7);
+ InsertVp8(seq_num + 4, kF, kT , kT , kF , pid + 4, 0 , 5);
+
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+ CheckReferencesVp8(pid + 4, pid + 3);
+ CheckReferencesVp8(pid + 5, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 5);
+}
+
+// Test with 2 temporal layers in a 01 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_01) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0, 255);
+ InsertVp8(seq_num + 1, kF, kT , kT , kT , pid + 1, 1, 255);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0, 0);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 1, 0);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid + 1, pid + 2);
+}
+
+// Test with 2 temporal layers in a 01 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_01) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num + 1, kF, kT , kT , kT , pid + 1, 1 , 255);
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 255);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 1 , 0);
+ InsertVp8(seq_num + 5, kF, kT , kT , kF , pid + 5, 1 , 1);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 0);
+ InsertVp8(seq_num + 4, kF, kT , kT , kF , pid + 4, 0 , 1);
+ InsertVp8(seq_num + 6, kF, kT , kT , kF , pid + 6, 0 , 2);
+ InsertVp8(seq_num + 7, kF, kT , kT , kF , pid + 7, 1 , 2);
+
+ ASSERT_EQ(8UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4, pid + 2);
+ CheckReferencesVp8(pid + 5, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 4);
+ CheckReferencesVp8(pid + 7, pid + 5, pid + 6);
+}
+
+// Test with 3 temporal layers in a 0212 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_0212) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 55);
+ InsertVp8(seq_num + 1 , kF, kT , kT , kT , pid + 1 , 2 , 55);
+ InsertVp8(seq_num + 2 , kF, kT , kT , kT , pid + 2 , 1 , 55);
+ InsertVp8(seq_num + 3 , kF, kT , kT , kF , pid + 3 , 2 , 55);
+ InsertVp8(seq_num + 4 , kF, kT , kT , kF , pid + 4 , 0 , 56);
+ InsertVp8(seq_num + 5 , kF, kT , kT , kF , pid + 5 , 2 , 56);
+ InsertVp8(seq_num + 6 , kF, kT , kT , kF , pid + 6 , 1 , 56);
+ InsertVp8(seq_num + 7 , kF, kT , kT , kF , pid + 7 , 2 , 56);
+ InsertVp8(seq_num + 8 , kF, kT , kT , kF , pid + 8 , 0 , 57);
+ InsertVp8(seq_num + 9 , kF, kT , kT , kT , pid + 9 , 2 , 57);
+ InsertVp8(seq_num + 10, kF, kT , kT , kT , pid + 10, 1 , 57);
+ InsertVp8(seq_num + 11, kF, kT , kT , kF , pid + 11, 2 , 57);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1 , pid);
+ CheckReferencesVp8(pid + 2 , pid);
+ CheckReferencesVp8(pid + 3 , pid, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4 , pid);
+ CheckReferencesVp8(pid + 5 , pid + 2, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6 , pid + 2, pid + 4);
+ CheckReferencesVp8(pid + 7 , pid + 4, pid + 5, pid + 6);
+ CheckReferencesVp8(pid + 8 , pid + 4);
+ CheckReferencesVp8(pid + 9 , pid + 8);
+ CheckReferencesVp8(pid + 10, pid + 8);
+ CheckReferencesVp8(pid + 11, pid + 8, pid + 9, pid + 10);
+}
+
+// Test with 3 temporal layers in a 0212 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_0212) {
+ uint16_t pid = 126;
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num + 1 , kF, kT , kT , kT , pid + 1 , 2 , 55);
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 55);
+ InsertVp8(seq_num + 2 , kF, kT , kT , kT , pid + 2 , 1 , 55);
+ InsertVp8(seq_num + 4 , kF, kT , kT , kF , pid + 4 , 0 , 56);
+ InsertVp8(seq_num + 5 , kF, kT , kT , kF , pid + 5 , 2 , 56);
+ InsertVp8(seq_num + 3 , kF, kT , kT , kF , pid + 3 , 2 , 55);
+ InsertVp8(seq_num + 7 , kF, kT , kT , kF , pid + 7 , 2 , 56);
+ InsertVp8(seq_num + 9 , kF, kT , kT , kT , pid + 9 , 2 , 57);
+ InsertVp8(seq_num + 6 , kF, kT , kT , kF , pid + 6 , 1 , 56);
+ InsertVp8(seq_num + 8 , kF, kT , kT , kF , pid + 8 , 0 , 57);
+ InsertVp8(seq_num + 11, kF, kT , kT , kF , pid + 11, 2 , 57);
+ InsertVp8(seq_num + 10, kF, kT , kT , kT , pid + 10, 1 , 57);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1 , pid);
+ CheckReferencesVp8(pid + 2 , pid);
+ CheckReferencesVp8(pid + 3 , pid, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4 , pid);
+ CheckReferencesVp8(pid + 5 , pid + 2, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6 , pid + 2, pid + 4);
+ CheckReferencesVp8(pid + 7 , pid + 4, pid + 5, pid + 6);
+ CheckReferencesVp8(pid + 8 , pid + 4);
+ CheckReferencesVp8(pid + 9 , pid + 8);
+ CheckReferencesVp8(pid + 10, pid + 8);
+ CheckReferencesVp8(pid + 11, pid + 8, pid + 9, pid + 10);
+}
+
+TEST_F(TestPacketBuffer, Vp8InsertManyFrames_0212) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ const int keyframes_to_insert = 50;
+ const int frames_per_keyframe = 120; // Should be a multiple of 4.
+ uint8_t tl0 = 128;
+
+ for (int k = 0; k < keyframes_to_insert; ++k) {
+ // seq_num , keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT , kT , kT , kF , pid , 0 , tl0);
+ InsertVp8(seq_num + 1, kF , kT , kT , kT , pid + 1, 2 , tl0);
+ InsertVp8(seq_num + 2, kF , kT , kT , kT , pid + 2, 1 , tl0);
+ InsertVp8(seq_num + 3, kF , kT , kT , kF , pid + 3, 2 , tl0);
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid, pid + 1, pid + 2);
+ frames_from_callback_.clear();
+ ++tl0;
+
+ for (int f = 4; f < frames_per_keyframe; f += 4) {
+ uint16_t sf = seq_num + f;
+ uint16_t pidf = pid + f;
+
+ // seq_num, keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(sf , kF , kT , kT , kF , pidf , 0 , tl0);
+ InsertVp8(sf + 1 , kF , kT , kT , kF , pidf + 1, 2 , tl0);
+ InsertVp8(sf + 2 , kF , kT , kT , kF , pidf + 2, 1 , tl0);
+ InsertVp8(sf + 3 , kF , kT , kT , kF , pidf + 3, 2 , tl0);
+ CheckReferencesVp8(pidf, pidf - 4);
+ CheckReferencesVp8(pidf + 1, pidf, pidf - 1, pidf - 2);
+ CheckReferencesVp8(pidf + 2, pidf, pidf - 2);
+ CheckReferencesVp8(pidf + 3, pidf, pidf + 1, pidf + 2);
+ frames_from_callback_.clear();
+ ++tl0;
+ }
+
+ pid += frames_per_keyframe;
+ seq_num += frames_per_keyframe;
+ }
+}
+
+TEST_F(TestPacketBuffer, Vp8LayerSync) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT , kT , kT , kF , pid , 0 , 0);
+ InsertVp8(seq_num + 1 , kF , kT , kT , kT , pid + 1 , 1 , 0);
+ InsertVp8(seq_num + 2 , kF , kT , kT , kF , pid + 2 , 0 , 1);
+ ASSERT_EQ(3UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 4 , kF , kT , kT , kF , pid + 4 , 0 , 2);
+ InsertVp8(seq_num + 5 , kF , kT , kT , kT , pid + 5 , 1 , 2);
+ InsertVp8(seq_num + 6 , kF , kT , kT , kF , pid + 6 , 0 , 3);
+ InsertVp8(seq_num + 7 , kF , kT , kT , kF , pid + 7 , 1 , 3);
+
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 4, pid + 2);
+ CheckReferencesVp8(pid + 5, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 4);
+ CheckReferencesVp8(pid + 7, pid + 6, pid + 5);
+}
+
+TEST_F(TestPacketBuffer, Vp8InsertLargeFrames) {
+ packet_buffer_.reset(new PacketBuffer(1 << 3, 1 << 12, this));
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ const uint16_t packets_per_frame = 1000;
+ uint16_t current = seq_num;
+ uint16_t end = current + packets_per_frame;
+
+ // seq_num , keyf, frst, lst, sync, pid, tid, tl0
+ InsertVp8(current++, kT , kT , kF , kF , pid, 0 , 0);
+ while (current != end)
+ InsertVp8(current++, kF , kF , kF , kF , pid, 0 , 0);
+ InsertVp8(current++, kF , kF , kT , kF , pid, 0 , 0);
+ end = current + packets_per_frame;
+
+ for (int f = 1; f < 4; ++f) {
+ InsertVp8(current++, kF , kT , kF , kF , pid + f, 0, f);
+ while (current != end)
+ InsertVp8(current++, kF , kF , kF , kF , pid + f, 0, f);
+ InsertVp8(current++, kF , kF , kT , kF , pid + f, 0, f);
+ end = current + packets_per_frame;
+ }
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofInsertOneFrame) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ // seq_num, keyf, frst, lst, up, pid, sid, tid, tl0, ss
+ InsertVp9Gof(seq_num, kT , kT , kT , kF, pid, 0 , 0 , 0 , &ss);
+
+ CheckReferencesVp9(pid, 0);
+}
+
+TEST_F(TestPacketBuffer, Vp9NoPictureIdReordered) {
+ uint16_t sn = 0xfffa;
+
+ // sn , kf, frst, lst
+ InsertVp9Gof(sn + 1 , kF, kF , kF);
+ InsertVp9Gof(sn , kT, kT , kF);
+ InsertVp9Gof(sn + 2 , kF, kF , kT);
+ InsertVp9Gof(sn + 4 , kF, kF , kT);
+ InsertVp9Gof(sn + 6 , kF, kF , kF);
+ InsertVp9Gof(sn + 3 , kF, kT , kF);
+ InsertVp9Gof(sn + 7 , kF, kF , kF);
+ InsertVp9Gof(sn + 5 , kF, kT , kF);
+ InsertVp9Gof(sn + 9 , kF, kT , kT);
+ InsertVp9Gof(sn + 10, kF, kT , kF);
+ InsertVp9Gof(sn + 8 , kF, kF , kT);
+ InsertVp9Gof(sn + 13, kF, kT , kF);
+ InsertVp9Gof(sn + 14, kF, kF , kF);
+ InsertVp9Gof(sn + 12, kT, kT , kT);
+ InsertVp9Gof(sn + 11, kF, kF , kT);
+ InsertVp9Gof(sn + 16, kF, kF , kF);
+ InsertVp9Gof(sn + 19, kF, kT , kF);
+ InsertVp9Gof(sn + 15, kF, kF , kF);
+ InsertVp9Gof(sn + 17, kF, kF , kT);
+ InsertVp9Gof(sn + 20, kF, kF , kT);
+ InsertVp9Gof(sn + 21, kF, kT , kT);
+ InsertVp9Gof(sn + 18, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp9(sn + 2 , 0);
+ CheckReferencesVp9(sn + 4 , 0, sn + 2);
+ CheckReferencesVp9(sn + 8 , 0, sn + 4);
+ CheckReferencesVp9(sn + 9 , 0, sn + 8);
+ CheckReferencesVp9(sn + 11, 0, sn + 9);
+ CheckReferencesVp9(sn + 12, 0);
+ CheckReferencesVp9(sn + 17, 0, sn + 12);
+ CheckReferencesVp9(sn + 18, 0, sn + 17);
+ CheckReferencesVp9(sn + 20, 0, sn + 18);
+ CheckReferencesVp9(sn + 21, 0, sn + 20);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_0) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1); // Only 1 spatial layer.
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 0 , 5);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 6);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 0 , 7);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 8);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 0 , 9);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 10);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 0 , 11);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 12);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 0 , 13);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 14);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 0 , 15);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 16);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 0 , 17);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 18);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 0 , 19);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid + 1);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 3);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 5);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 7);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 9);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 11);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 13);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 15);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 17);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_0) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1); // Only 1 spatial layer.
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 0 , 1);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 0 , 5);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 0 , 7);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 6);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 8);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 10);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 0 , 13);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 0 , 11);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 0 , 9);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 16);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 14);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 0 , 15);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 12);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 0 , 17);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 0 , 19);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 18);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid + 1);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 3);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 5);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 7);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 9);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 11);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 13);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 15);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 17);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_01) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 0101 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 1 , 2);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 1 , 3);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 1 , 4);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 5);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 1 , 5);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 6);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 1 , 6);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 7);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 1 , 7);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 8);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 1 , 8);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 9);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 1 , 9);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 2);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 6);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 10);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 14);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_01) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 01 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 1 , 2);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 1 , 3);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 5);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 1 , 4);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 1 , 5);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 1 , 6);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 8);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 6);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 7);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 1 , 8);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 1 , 9);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 1 , 7);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 9);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 2);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 6);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 10);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 14);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 4);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 2 , 4);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 1 , 4);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 2 , 4);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 12);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 17, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 4);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 2 , 4);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 2 , 4);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 1 , 4);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 12);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 17, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersUpSwitch_02120212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode4); // 02120212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kT, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kT, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kT, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+
+ ASSERT_EQ(16UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 3, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 2, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 11, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 10, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersUpSwitchReordered_02120212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode4); // 02120212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kT, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kT, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kT, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+
+ ASSERT_EQ(16UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 3, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 2, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 11, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 10, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_01_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 01 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 2);
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2 , &ss);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 3);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 3);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 3);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeOneFrame) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn, kf, frst, lst, intr, pid, sid, tid, tl0
+ InsertVp9Flex(sn, kT, kT , kT , kF , pid, 0 , 0 , 0);
+
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeTwoSpatialLayers) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn , kf, frst, lst, intr, pid , sid, tid, tl0, refs
+ InsertVp9Flex(sn , kT, kT , kT , kF , pid , 0 , 0 , 0);
+ InsertVp9Flex(sn + 1 , kT, kT , kT , kT , pid , 1 , 0 , 0);
+ InsertVp9Flex(sn + 2 , kF, kT , kT , kF , pid + 1, 1 , 0 , 0 , {1});
+ InsertVp9Flex(sn + 3 , kF, kT , kT , kF , pid + 2, 0 , 0 , 1 , {2});
+ InsertVp9Flex(sn + 4 , kF, kT , kT , kF , pid + 2, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 5 , kF, kT , kT , kF , pid + 3, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 6 , kF, kT , kT , kF , pid + 4, 0 , 0 , 2 , {2});
+ InsertVp9Flex(sn + 7 , kF, kT , kT , kF , pid + 4, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 8 , kF, kT , kT , kF , pid + 5, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 9 , kF, kT , kT , kF , pid + 6, 0 , 0 , 3 , {2});
+ InsertVp9Flex(sn + 10, kF, kT , kT , kF , pid + 6, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 11, kF, kT , kT , kF , pid + 7, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 12, kF, kT , kT , kF , pid + 8, 0 , 0 , 4 , {2});
+ InsertVp9Flex(sn + 13, kF, kT , kT , kF , pid + 8, 1 , 0 , 4 , {1});
+
+ ASSERT_EQ(14UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid , 0);
+ CheckReferencesVp9(pid , 1);
+ CheckReferencesVp9(pid + 1, 1, pid);
+ CheckReferencesVp9(pid + 2, 0, pid);
+ CheckReferencesVp9(pid + 2, 1, pid + 1);
+ CheckReferencesVp9(pid + 3, 1, pid + 2);
+ CheckReferencesVp9(pid + 4, 0, pid + 2);
+ CheckReferencesVp9(pid + 4, 1, pid + 3);
+ CheckReferencesVp9(pid + 5, 1, pid + 4);
+ CheckReferencesVp9(pid + 6, 0, pid + 4);
+ CheckReferencesVp9(pid + 6, 1, pid + 5);
+ CheckReferencesVp9(pid + 7, 1, pid + 6);
+ CheckReferencesVp9(pid + 8, 0, pid + 6);
+ CheckReferencesVp9(pid + 8, 1, pid + 7);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeTwoSpatialLayersReordered) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn , kf, frst, lst, intr, pid , sid, tid, tl0, refs
+ InsertVp9Flex(sn + 1 , kT, kT , kT , kT , pid , 1 , 0 , 0);
+ InsertVp9Flex(sn + 2 , kF, kT , kT , kF , pid + 1, 1 , 0 , 0 , {1});
+ InsertVp9Flex(sn , kT, kT , kT , kF , pid , 0 , 0 , 0);
+ InsertVp9Flex(sn + 4 , kF, kT , kT , kF , pid + 2, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 5 , kF, kT , kT , kF , pid + 3, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 3 , kF, kT , kT , kF , pid + 2, 0 , 0 , 1 , {2});
+ InsertVp9Flex(sn + 7 , kF, kT , kT , kF , pid + 4, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 6 , kF, kT , kT , kF , pid + 4, 0 , 0 , 2 , {2});
+ InsertVp9Flex(sn + 8 , kF, kT , kT , kF , pid + 5, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 9 , kF, kT , kT , kF , pid + 6, 0 , 0 , 3 , {2});
+ InsertVp9Flex(sn + 11, kF, kT , kT , kF , pid + 7, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 10, kF, kT , kT , kF , pid + 6, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 13, kF, kT , kT , kF , pid + 8, 1 , 0 , 4 , {1});
+ InsertVp9Flex(sn + 12, kF, kT , kT , kF , pid + 8, 0 , 0 , 4 , {2});
+
+ ASSERT_EQ(14UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid , 0);
+ CheckReferencesVp9(pid , 1);
+ CheckReferencesVp9(pid + 1, 1, pid);
+ CheckReferencesVp9(pid + 2, 0, pid);
+ CheckReferencesVp9(pid + 2, 1, pid + 1);
+ CheckReferencesVp9(pid + 3, 1, pid + 2);
+ CheckReferencesVp9(pid + 4, 0, pid + 2);
+ CheckReferencesVp9(pid + 4, 1, pid + 3);
+ CheckReferencesVp9(pid + 5, 1, pid + 4);
+ CheckReferencesVp9(pid + 6, 0, pid + 4);
+ CheckReferencesVp9(pid + 6, 1, pid + 5);
+ CheckReferencesVp9(pid + 7, 1, pid + 6);
+ CheckReferencesVp9(pid + 8, 0, pid + 6);
+ CheckReferencesVp9(pid + 8, 1, pid + 7);
}
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select.cc b/chromium/third_party/webrtc/modules/video_coding/qm_select.cc
deleted file mode 100644
index 9da42bb33c6..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select.cc
+++ /dev/null
@@ -1,953 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/qm_select.h"
-
-#include <math.h>
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-#include "webrtc/modules/video_coding/internal_defines.h"
-#include "webrtc/modules/video_coding/qm_select_data.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// QM-METHOD class
-
-VCMQmMethod::VCMQmMethod()
- : content_metrics_(NULL),
- width_(0),
- height_(0),
- user_frame_rate_(0.0f),
- native_width_(0),
- native_height_(0),
- native_frame_rate_(0.0f),
- image_type_(kVGA),
- framerate_level_(kFrameRateHigh),
- init_(false) {
- ResetQM();
-}
-
-VCMQmMethod::~VCMQmMethod() {}
-
-void VCMQmMethod::ResetQM() {
- aspect_ratio_ = 1.0f;
- motion_.Reset();
- spatial_.Reset();
- content_class_ = 0;
-}
-
-uint8_t VCMQmMethod::ComputeContentClass() {
- ComputeMotionNFD();
- ComputeSpatial();
- return content_class_ = 3 * motion_.level + spatial_.level;
-}
-
-void VCMQmMethod::UpdateContent(const VideoContentMetrics* contentMetrics) {
- content_metrics_ = contentMetrics;
-}
-
-void VCMQmMethod::ComputeMotionNFD() {
- if (content_metrics_) {
- motion_.value = content_metrics_->motion_magnitude;
- }
- // Determine motion level.
- if (motion_.value < kLowMotionNfd) {
- motion_.level = kLow;
- } else if (motion_.value > kHighMotionNfd) {
- motion_.level = kHigh;
- } else {
- motion_.level = kDefault;
- }
-}
-
-void VCMQmMethod::ComputeSpatial() {
- float spatial_err = 0.0;
- float spatial_err_h = 0.0;
- float spatial_err_v = 0.0;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
- // Spatial measure: take average of 3 prediction errors.
- spatial_.value = (spatial_err + spatial_err_h + spatial_err_v) / 3.0f;
-
- // Reduce thresholds for large scenes/higher pixel correlation.
- float scale2 = image_type_ > kVGA ? kScaleTexture : 1.0;
-
- if (spatial_.value > scale2 * kHighTexture) {
- spatial_.level = kHigh;
- } else if (spatial_.value < scale2 * kLowTexture) {
- spatial_.level = kLow;
- } else {
- spatial_.level = kDefault;
- }
-}
-
-ImageType VCMQmMethod::GetImageType(uint16_t width, uint16_t height) {
- // Get the image type for the encoder frame size.
- uint32_t image_size = width * height;
- if (image_size == kSizeOfImageType[kQCIF]) {
- return kQCIF;
- } else if (image_size == kSizeOfImageType[kHCIF]) {
- return kHCIF;
- } else if (image_size == kSizeOfImageType[kQVGA]) {
- return kQVGA;
- } else if (image_size == kSizeOfImageType[kCIF]) {
- return kCIF;
- } else if (image_size == kSizeOfImageType[kHVGA]) {
- return kHVGA;
- } else if (image_size == kSizeOfImageType[kVGA]) {
- return kVGA;
- } else if (image_size == kSizeOfImageType[kQFULLHD]) {
- return kQFULLHD;
- } else if (image_size == kSizeOfImageType[kWHD]) {
- return kWHD;
- } else if (image_size == kSizeOfImageType[kFULLHD]) {
- return kFULLHD;
- } else {
- // No exact match, find closet one.
- return FindClosestImageType(width, height);
- }
-}
-
-ImageType VCMQmMethod::FindClosestImageType(uint16_t width, uint16_t height) {
- float size = static_cast<float>(width * height);
- float min = size;
- int isel = 0;
- for (int i = 0; i < kNumImageTypes; ++i) {
- float dist = fabs(size - kSizeOfImageType[i]);
- if (dist < min) {
- min = dist;
- isel = i;
- }
- }
- return static_cast<ImageType>(isel);
-}
-
-FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
- if (avg_framerate <= kLowFrameRate) {
- return kFrameRateLow;
- } else if (avg_framerate <= kMiddleFrameRate) {
- return kFrameRateMiddle1;
- } else if (avg_framerate <= kHighFrameRate) {
- return kFrameRateMiddle2;
- } else {
- return kFrameRateHigh;
- }
-}
-
-// RESOLUTION CLASS
-
-VCMQmResolution::VCMQmResolution() : qm_(new VCMResolutionScale()) {
- Reset();
-}
-
-VCMQmResolution::~VCMQmResolution() {
- delete qm_;
-}
-
-void VCMQmResolution::ResetRates() {
- sum_target_rate_ = 0.0f;
- sum_incoming_framerate_ = 0.0f;
- sum_rate_MM_ = 0.0f;
- sum_rate_MM_sgn_ = 0.0f;
- sum_packet_loss_ = 0.0f;
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- frame_cnt_ = 0;
- frame_cnt_delta_ = 0;
- low_buffer_cnt_ = 0;
- update_rate_cnt_ = 0;
-}
-
-void VCMQmResolution::ResetDownSamplingState() {
- state_dec_factor_spatial_ = 1.0;
- state_dec_factor_temporal_ = 1.0;
- for (int i = 0; i < kDownActionHistorySize; i++) {
- down_action_history_[i].spatial = kNoChangeSpatial;
- down_action_history_[i].temporal = kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::Reset() {
- target_bitrate_ = 0.0f;
- incoming_framerate_ = 0.0f;
- buffer_level_ = 0.0f;
- per_frame_bandwidth_ = 0.0f;
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- encoder_state_ = kStableEncoding;
- num_layers_ = 1;
- ResetRates();
- ResetDownSamplingState();
- ResetQM();
-}
-
-EncoderState VCMQmResolution::GetEncoderState() {
- return encoder_state_;
-}
-
-// Initialize state after re-initializing the encoder,
-// i.e., after SetEncodingData() in mediaOpt.
-int VCMQmResolution::Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers) {
- if (user_framerate == 0.0f || width == 0 || height == 0) {
- return VCM_PARAMETER_ERROR;
- }
- Reset();
- target_bitrate_ = bitrate;
- incoming_framerate_ = user_framerate;
- UpdateCodecParameters(user_framerate, width, height);
- native_width_ = width;
- native_height_ = height;
- native_frame_rate_ = user_framerate;
- num_layers_ = num_layers;
- // Initial buffer level.
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- // Per-frame bandwidth.
- per_frame_bandwidth_ = target_bitrate_ / user_framerate;
- init_ = true;
- return VCM_OK;
-}
-
-void VCMQmResolution::UpdateCodecParameters(float frame_rate,
- uint16_t width,
- uint16_t height) {
- width_ = width;
- height_ = height;
- // |user_frame_rate| is the target frame rate for VPM frame dropper.
- user_frame_rate_ = frame_rate;
- image_type_ = GetImageType(width, height);
-}
-
-// Update rate data after every encoded frame.
-void VCMQmResolution::UpdateEncodedSize(size_t encoded_size) {
- frame_cnt_++;
- // Convert to Kbps.
- float encoded_size_kbits = 8.0f * static_cast<float>(encoded_size) / 1000.0f;
-
- // Update the buffer level:
- // Note this is not the actual encoder buffer level.
- // |buffer_level_| is reset to an initial value after SelectResolution is
- // called, and does not account for frame dropping by encoder or VCM.
- buffer_level_ += per_frame_bandwidth_ - encoded_size_kbits;
-
- // Counter for occurrences of low buffer level:
- // low/negative values means encoder is likely dropping frames.
- if (buffer_level_ <= kPercBufferThr * kInitBufferLevel * target_bitrate_) {
- low_buffer_cnt_++;
- }
-}
-
-// Update various quantities after SetTargetRates in MediaOpt.
-void VCMQmResolution::UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss) {
- // Sum the target bitrate: this is the encoder rate from previous update
- // (~1sec), i.e, before the update for next ~1sec.
- sum_target_rate_ += target_bitrate_;
- update_rate_cnt_++;
-
- // Sum the received (from RTCP reports) packet loss rates.
- sum_packet_loss_ += static_cast<float>(packet_loss / 255.0);
-
- // Sum the sequence rate mismatch:
- // Mismatch here is based on the difference between the target rate
- // used (in previous ~1sec) and the average actual encoding rate measured
- // at previous ~1sec.
- float diff = target_bitrate_ - encoder_sent_rate;
- if (target_bitrate_ > 0.0)
- sum_rate_MM_ += fabs(diff) / target_bitrate_;
- int sgnDiff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
- // To check for consistent under(+)/over_shooting(-) of target rate.
- sum_rate_MM_sgn_ += sgnDiff;
-
- // Update with the current new target and frame rate:
- // these values are ones the encoder will use for the current/next ~1sec.
- target_bitrate_ = target_bitrate;
- incoming_framerate_ = incoming_framerate;
- sum_incoming_framerate_ += incoming_framerate_;
- // Update the per_frame_bandwidth:
- // this is the per_frame_bw for the current/next ~1sec.
- per_frame_bandwidth_ = 0.0f;
- if (incoming_framerate_ > 0.0f) {
- per_frame_bandwidth_ = target_bitrate_ / incoming_framerate_;
- }
-}
-
-// Select the resolution factors: frame size and frame rate change (qm scales).
-// Selection is for going down in resolution, or for going back up
-// (if a previous down-sampling action was taken).
-
-// In the current version the following constraints are imposed:
-// 1) We only allow for one action, either down or up, at a given time.
-// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
-// temporal/frame rate reduction by 1/2 and 2/3.
-// 3) The action for going back up is the reverse of last (spatial or temporal)
-// down-sampling action. The list of down-sampling actions from the
-// Initialize() state are kept in |down_action_history_|.
-// 4) The total amount of down-sampling (spatial and/or temporal) from the
-// Initialize() state (native resolution) is limited by various factors.
-int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
- if (!init_) {
- return VCM_UNINITIALIZED;
- }
- if (content_metrics_ == NULL) {
- Reset();
- *qm = qm_;
- return VCM_OK;
- }
-
- // Check conditions on down-sampling state.
- assert(state_dec_factor_spatial_ >= 1.0f);
- assert(state_dec_factor_temporal_ >= 1.0f);
- assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
- assert(state_dec_factor_temporal_ <= kMaxTempDown);
- assert(state_dec_factor_temporal_ * state_dec_factor_spatial_ <=
- kMaxTotalDown);
-
- // Compute content class for selection.
- content_class_ = ComputeContentClass();
- // Compute various rate quantities for selection.
- ComputeRatesForSelection();
-
- // Get the encoder state.
- ComputeEncoderState();
-
- // Default settings: no action.
- SetDefaultAction();
- *qm = qm_;
-
- // Check for going back up in resolution, if we have had some down-sampling
- // relative to native state in Initialize().
- if (down_action_history_[0].spatial != kNoChangeSpatial ||
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (GoingUpResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- }
-
- // Check for going down in resolution.
- if (GoingDownResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- return VCM_OK;
-}
-
-void VCMQmResolution::SetDefaultAction() {
- qm_->codec_width = width_;
- qm_->codec_height = height_;
- qm_->frame_rate = user_frame_rate_;
- qm_->change_resolution_spatial = false;
- qm_->change_resolution_temporal = false;
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 1.0f;
- qm_->temporal_fact = 1.0f;
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kNoChangeTemporal;
-}
-
-void VCMQmResolution::ComputeRatesForSelection() {
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- if (frame_cnt_ > 0) {
- avg_ratio_buffer_low_ =
- static_cast<float>(low_buffer_cnt_) / static_cast<float>(frame_cnt_);
- }
- if (update_rate_cnt_ > 0) {
- avg_rate_mismatch_ =
- static_cast<float>(sum_rate_MM_) / static_cast<float>(update_rate_cnt_);
- avg_rate_mismatch_sgn_ = static_cast<float>(sum_rate_MM_sgn_) /
- static_cast<float>(update_rate_cnt_);
- avg_target_rate_ = static_cast<float>(sum_target_rate_) /
- static_cast<float>(update_rate_cnt_);
- avg_incoming_framerate_ = static_cast<float>(sum_incoming_framerate_) /
- static_cast<float>(update_rate_cnt_);
- avg_packet_loss_ = static_cast<float>(sum_packet_loss_) /
- static_cast<float>(update_rate_cnt_);
- }
- // For selection we may want to weight some quantities more heavily
- // with the current (i.e., next ~1sec) rate values.
- avg_target_rate_ =
- kWeightRate * avg_target_rate_ + (1.0 - kWeightRate) * target_bitrate_;
- avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
- (1.0 - kWeightRate) * incoming_framerate_;
- // Use base layer frame rate for temporal layers: this will favor spatial.
- assert(num_layers_ > 0);
- framerate_level_ = FrameRateLevel(avg_incoming_framerate_ /
- static_cast<float>(1 << (num_layers_ - 1)));
-}
-
-void VCMQmResolution::ComputeEncoderState() {
- // Default.
- encoder_state_ = kStableEncoding;
-
- // Assign stressed state if:
- // 1) occurrences of low buffer levels is high, or
- // 2) rate mis-match is high, and consistent over-shooting by encoder.
- if ((avg_ratio_buffer_low_ > kMaxBufferLow) ||
- ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ < -kRateOverShoot))) {
- encoder_state_ = kStressedEncoding;
- }
- // Assign easy state if:
- // 1) rate mis-match is high, and
- // 2) consistent under-shooting by encoder.
- if ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ > kRateUnderShoot)) {
- encoder_state_ = kEasyEncoding;
- }
-}
-
-bool VCMQmResolution::GoingUpResolution() {
- // For going up, we check for undoing the previous down-sampling action.
-
- float fac_width = kFactorWidthSpatial[down_action_history_[0].spatial];
- float fac_height = kFactorHeightSpatial[down_action_history_[0].spatial];
- float fac_temp = kFactorTemporal[down_action_history_[0].temporal];
- // For going up spatially, we allow for going up by 3/4x3/4 at each stage.
- // So if the last spatial action was 1/2x1/2 it would be undone in 2 stages.
- // Modify the fac_width/height for this case.
- if (down_action_history_[0].spatial == kOneQuarterSpatialUniform) {
- fac_width = kFactorWidthSpatial[kOneQuarterSpatialUniform] /
- kFactorWidthSpatial[kOneHalfSpatialUniform];
- fac_height = kFactorHeightSpatial[kOneQuarterSpatialUniform] /
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
-
- // Check if we should go up both spatially and temporally.
- if (down_action_history_[0].spatial != kNoChangeSpatial &&
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (ConditionForGoingUp(fac_width, fac_height, fac_temp,
- kTransRateScaleUpSpatialTemp)) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- }
- // Check if we should go up either spatially or temporally.
- bool selected_up_spatial = false;
- bool selected_up_temporal = false;
- if (down_action_history_[0].spatial != kNoChangeSpatial) {
- selected_up_spatial = ConditionForGoingUp(fac_width, fac_height, 1.0f,
- kTransRateScaleUpSpatial);
- }
- if (down_action_history_[0].temporal != kNoChangeTemporal) {
- selected_up_temporal =
- ConditionForGoingUp(1.0f, 1.0f, fac_temp, kTransRateScaleUpTemp);
- }
- if (selected_up_spatial && !selected_up_temporal) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (!selected_up_spatial && selected_up_temporal) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (selected_up_spatial && selected_up_temporal) {
- PickSpatialOrTemporal();
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- return false;
-}
-
-bool VCMQmResolution::ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- float estimated_transition_rate_up =
- GetTransitionRate(fac_width, fac_height, fac_temp, scale_fac);
- // Go back up if:
- // 1) target rate is above threshold and current encoder state is stable, or
- // 2) encoder state is easy (encoder is significantly under-shooting target).
- if (((avg_target_rate_ > estimated_transition_rate_up) &&
- (encoder_state_ == kStableEncoding)) ||
- (encoder_state_ == kEasyEncoding)) {
- return true;
- } else {
- return false;
- }
-}
-
-bool VCMQmResolution::GoingDownResolution() {
- float estimated_transition_rate_down =
- GetTransitionRate(1.0f, 1.0f, 1.0f, 1.0f);
- float max_rate = kFrameRateFac[framerate_level_] * kMaxRateQm[image_type_];
- // Resolution reduction if:
- // (1) target rate is below transition rate, or
- // (2) encoder is in stressed state and target rate below a max threshold.
- if ((avg_target_rate_ < estimated_transition_rate_down) ||
- (encoder_state_ == kStressedEncoding && avg_target_rate_ < max_rate)) {
- // Get the down-sampling action: based on content class, and how low
- // average target rate is relative to transition rate.
- uint8_t spatial_fact =
- kSpatialAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
- uint8_t temp_fact =
- kTemporalAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
-
- switch (spatial_fact) {
- case 4: {
- action_.spatial = kOneQuarterSpatialUniform;
- break;
- }
- case 2: {
- action_.spatial = kOneHalfSpatialUniform;
- break;
- }
- case 1: {
- action_.spatial = kNoChangeSpatial;
- break;
- }
- default: { assert(false); }
- }
- switch (temp_fact) {
- case 3: {
- action_.temporal = kTwoThirdsTemporal;
- break;
- }
- case 2: {
- action_.temporal = kOneHalfTemporal;
- break;
- }
- case 1: {
- action_.temporal = kNoChangeTemporal;
- break;
- }
- default: { assert(false); }
- }
- // Only allow for one action (spatial or temporal) at a given time.
- assert(action_.temporal == kNoChangeTemporal ||
- action_.spatial == kNoChangeSpatial);
-
- // Adjust cases not captured in tables, mainly based on frame rate, and
- // also check for odd frame sizes.
- AdjustAction();
-
- // Update down-sampling state.
- if (action_.spatial != kNoChangeSpatial ||
- action_.temporal != kNoChangeTemporal) {
- UpdateDownsamplingState(kDownResolution);
- return true;
- }
- }
- return false;
-}
-
-float VCMQmResolution::GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- ImageType image_type =
- GetImageType(static_cast<uint16_t>(fac_width * width_),
- static_cast<uint16_t>(fac_height * height_));
-
- FrameRateLevelClass framerate_level =
- FrameRateLevel(fac_temp * avg_incoming_framerate_);
- // If we are checking for going up temporally, and this is the last
- // temporal action, then use native frame rate.
- if (down_action_history_[1].temporal == kNoChangeTemporal &&
- fac_temp > 1.0f) {
- framerate_level = FrameRateLevel(native_frame_rate_);
- }
-
- // The maximum allowed rate below which down-sampling is allowed:
- // Nominal values based on image format (frame size and frame rate).
- float max_rate = kFrameRateFac[framerate_level] * kMaxRateQm[image_type];
-
- uint8_t image_class = image_type > kVGA ? 1 : 0;
- uint8_t table_index = image_class * 9 + content_class_;
- // Scale factor for down-sampling transition threshold:
- // factor based on the content class and the image size.
- float scaleTransRate = kScaleTransRateQm[table_index];
- // Threshold bitrate for resolution action.
- return static_cast<float>(scale_fac * scaleTransRate * max_rate);
-}
-
-void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
- if (up_down == kUpResolution) {
- qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
- // If last spatial action was 1/2x1/2, we undo it in two steps, so the
- // spatial scale factor in this first step is modified as (4.0/3.0 / 2.0).
- if (action_.spatial == kOneQuarterSpatialUniform) {
- qm_->spatial_width_fact = 1.0f *
- kFactorWidthSpatial[kOneHalfSpatialUniform] /
- kFactorWidthSpatial[kOneQuarterSpatialUniform];
- qm_->spatial_height_fact =
- 1.0f * kFactorHeightSpatial[kOneHalfSpatialUniform] /
- kFactorHeightSpatial[kOneQuarterSpatialUniform];
- }
- qm_->temporal_fact = 1.0f / kFactorTemporal[action_.temporal];
- RemoveLastDownAction();
- } else if (up_down == kDownResolution) {
- ConstrainAmountOfDownSampling();
- ConvertSpatialFractionalToWhole();
- qm_->spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- qm_->temporal_fact = kFactorTemporal[action_.temporal];
- InsertLatestDownAction();
- } else {
- // This function should only be called if either the Up or Down action
- // has been selected.
- assert(false);
- }
- UpdateCodecResolution();
- state_dec_factor_spatial_ = state_dec_factor_spatial_ *
- qm_->spatial_width_fact *
- qm_->spatial_height_fact;
- state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
-}
-
-void VCMQmResolution::UpdateCodecResolution() {
- if (action_.spatial != kNoChangeSpatial) {
- qm_->change_resolution_spatial = true;
- qm_->codec_width =
- static_cast<uint16_t>(width_ / qm_->spatial_width_fact + 0.5f);
- qm_->codec_height =
- static_cast<uint16_t>(height_ / qm_->spatial_height_fact + 0.5f);
- // Size should not exceed native sizes.
- assert(qm_->codec_width <= native_width_);
- assert(qm_->codec_height <= native_height_);
- // New sizes should be multiple of 2, otherwise spatial should not have
- // been selected.
- assert(qm_->codec_width % 2 == 0);
- assert(qm_->codec_height % 2 == 0);
- }
- if (action_.temporal != kNoChangeTemporal) {
- qm_->change_resolution_temporal = true;
- // Update the frame rate based on the average incoming frame rate.
- qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
- if (down_action_history_[0].temporal == 0) {
- // When we undo the last temporal-down action, make sure we go back up
- // to the native frame rate. Since the incoming frame rate may
- // fluctuate over time, |avg_incoming_framerate_| scaled back up may
- // be smaller than |native_frame rate_|.
- qm_->frame_rate = native_frame_rate_;
- }
- }
-}
-
-uint8_t VCMQmResolution::RateClass(float transition_rate) {
- return avg_target_rate_ < (kFacLowRate * transition_rate)
- ? 0
- : (avg_target_rate_ >= transition_rate ? 2 : 1);
-}
-
-// TODO(marpan): Would be better to capture these frame rate adjustments by
-// extending the table data (qm_select_data.h).
-void VCMQmResolution::AdjustAction() {
- // If the spatial level is default state (neither low or high), motion level
- // is not high, and spatial action was selected, switch to 2/3 frame rate
- // reduction if the average incoming frame rate is high.
- if (spatial_.level == kDefault && motion_.level != kHigh &&
- action_.spatial != kNoChangeSpatial &&
- framerate_level_ == kFrameRateHigh) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // If both motion and spatial level are low, and temporal down action was
- // selected, switch to spatial 3/4x3/4 if the frame rate is not above the
- // lower middle level (|kFrameRateMiddle1|).
- if (motion_.level == kLow && spatial_.level == kLow &&
- framerate_level_ <= kFrameRateMiddle1 &&
- action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action is selected, and there has been too much spatial
- // reduction already (i.e., 1/4), then switch to temporal action if the
- // average frame rate is not low.
- if (action_.spatial != kNoChangeSpatial &&
- down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
- framerate_level_ != kFrameRateLow) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // Never use temporal action if number of temporal layers is above 2.
- if (num_layers_ > 2) {
- if (action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- }
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action was selected, we need to make sure the frame sizes
- // are multiples of two. Otherwise switch to 2/3 temporal.
- if (action_.spatial != kNoChangeSpatial && !EvenFrameSize()) {
- action_.spatial = kNoChangeSpatial;
- // Only one action (spatial or temporal) is allowed at a given time, so need
- // to check whether temporal action is currently selected.
- action_.temporal = kTwoThirdsTemporal;
- }
-}
-
-void VCMQmResolution::ConvertSpatialFractionalToWhole() {
- // If 3/4 spatial is selected, check if there has been another 3/4,
- // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
- // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
- if (action_.spatial == kOneHalfSpatialUniform) {
- bool found = false;
- int isel = kDownActionHistorySize;
- for (int i = 0; i < kDownActionHistorySize; ++i) {
- if (down_action_history_[i].spatial == kOneHalfSpatialUniform) {
- isel = i;
- found = true;
- break;
- }
- }
- if (found) {
- action_.spatial = kOneQuarterSpatialUniform;
- state_dec_factor_spatial_ =
- state_dec_factor_spatial_ /
- (kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform]);
- // Check if switching to 1/2x1/2 (=1/4) spatial is allowed.
- ConstrainAmountOfDownSampling();
- if (action_.spatial == kNoChangeSpatial) {
- // Not allowed. Go back to 3/4x3/4 spatial.
- action_.spatial = kOneHalfSpatialUniform;
- state_dec_factor_spatial_ =
- state_dec_factor_spatial_ *
- kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- } else {
- // Switching is allowed. Remove 3/4x3/4 from the history, and update
- // the frame size.
- for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
- }
- width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
- height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
- }
- }
-}
-
-// Returns false if the new frame sizes, under the current spatial action,
-// are not multiples of two.
-bool VCMQmResolution::EvenFrameSize() {
- if (action_.spatial == kOneHalfSpatialUniform) {
- if ((width_ * 3 / 4) % 2 != 0 || (height_ * 3 / 4) % 2 != 0) {
- return false;
- }
- } else if (action_.spatial == kOneQuarterSpatialUniform) {
- if ((width_ * 1 / 2) % 2 != 0 || (height_ * 1 / 2) % 2 != 0) {
- return false;
- }
- }
- return true;
-}
-
-void VCMQmResolution::InsertLatestDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].spatial = down_action_history_[i - 1].spatial;
- }
- down_action_history_[0].spatial = action_.spatial;
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].temporal = down_action_history_[i - 1].temporal;
- }
- down_action_history_[0].temporal = action_.temporal;
- }
-}
-
-void VCMQmResolution::RemoveLastDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- // If the last spatial action was 1/2x1/2 we replace it with 3/4x3/4.
- if (action_.spatial == kOneQuarterSpatialUniform) {
- down_action_history_[0].spatial = kOneHalfSpatialUniform;
- } else {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
- }
- down_action_history_[kDownActionHistorySize - 1].spatial =
- kNoChangeSpatial;
- }
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].temporal = down_action_history_[i + 1].temporal;
- }
- down_action_history_[kDownActionHistorySize - 1].temporal =
- kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::ConstrainAmountOfDownSampling() {
- // Sanity checks on down-sampling selection:
- // override the settings for too small image size and/or frame rate.
- // Also check the limit on current down-sampling states.
-
- float spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- float spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- float temporal_fact = kFactorTemporal[action_.temporal];
- float new_dec_factor_spatial =
- state_dec_factor_spatial_ * spatial_width_fact * spatial_height_fact;
- float new_dec_factor_temp = state_dec_factor_temporal_ * temporal_fact;
-
- // No spatial sampling if current frame size is too small, or if the
- // amount of spatial down-sampling is above maximum spatial down-action.
- if ((width_ * height_) <= kMinImageSize ||
- new_dec_factor_spatial > kMaxSpatialDown) {
- action_.spatial = kNoChangeSpatial;
- new_dec_factor_spatial = state_dec_factor_spatial_;
- }
- // No frame rate reduction if average frame rate is below some point, or if
- // the amount of temporal down-sampling is above maximum temporal down-action.
- if (avg_incoming_framerate_ <= kMinFrameRate ||
- new_dec_factor_temp > kMaxTempDown) {
- action_.temporal = kNoChangeTemporal;
- new_dec_factor_temp = state_dec_factor_temporal_;
- }
- // Check if the total (spatial-temporal) down-action is above maximum allowed,
- // if so, disallow the current selected down-action.
- if (new_dec_factor_spatial * new_dec_factor_temp > kMaxTotalDown) {
- if (action_.spatial != kNoChangeSpatial) {
- action_.spatial = kNoChangeSpatial;
- } else if (action_.temporal != kNoChangeTemporal) {
- action_.temporal = kNoChangeTemporal;
- } else {
- // We only allow for one action (spatial or temporal) at a given time, so
- // either spatial or temporal action is selected when this function is
- // called. If the selected action is disallowed from one of the above
- // 2 prior conditions (on spatial & temporal max down-action), then this
- // condition "total down-action > |kMaxTotalDown|" would not be entered.
- assert(false);
- }
- }
-}
-
-void VCMQmResolution::PickSpatialOrTemporal() {
- // Pick the one that has had the most down-sampling thus far.
- if (state_dec_factor_spatial_ > state_dec_factor_temporal_) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- } else {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- }
-}
-
-// TODO(marpan): Update when we allow for directional spatial down-sampling.
-void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
- // Default is 4/3x4/3
- // For bit rates well below transitional rate, we select 2x2.
- if (avg_target_rate_ < transition_rate * kRateRedSpatial2X2) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 2.0f;
- }
- // Otherwise check prediction errors and aspect ratio.
- float spatial_err = 0.0f;
- float spatial_err_h = 0.0f;
- float spatial_err_v = 0.0f;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
-
- // Favor 1x2 if aspect_ratio is 16:9.
- if (aspect_ratio_ >= 16.0f / 9.0f) {
- // Check if 1x2 has lowest prediction error.
- if (spatial_err_h < spatial_err && spatial_err_h < spatial_err_v) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 1.0f;
- }
- }
- // Check for 4/3x4/3 selection: favor 2x2 over 1x2 and 2x1.
- if (spatial_err < spatial_err_h * (1.0f + kSpatialErr2x2VsHoriz) &&
- spatial_err < spatial_err_v * (1.0f + kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 4.0f / 3.0f;
- qm_->spatial_height_fact = 4.0f / 3.0f;
- }
- // Check for 2x1 selection.
- if (spatial_err_v < spatial_err_h * (1.0f - kSpatialErrVertVsHoriz) &&
- spatial_err_v < spatial_err * (1.0f - kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 2.0f;
- }
-}
-
-// ROBUSTNESS CLASS
-
-VCMQmRobustness::VCMQmRobustness() {
- Reset();
-}
-
-VCMQmRobustness::~VCMQmRobustness() {}
-
-void VCMQmRobustness::Reset() {
- prev_total_rate_ = 0.0f;
- prev_rtt_time_ = 0;
- prev_packet_loss_ = 0;
- prev_code_rate_delta_ = 0;
- ResetQM();
-}
-
-// Adjust the FEC rate based on the content and the network state
-// (packet loss rate, total rate/bandwidth, round trip time).
-// Note that packetLoss here is the filtered loss value.
-float VCMQmRobustness::AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss) {
- // Default: no adjustment
- float adjust_fec = 1.0f;
- if (content_metrics_ == NULL) {
- return adjust_fec;
- }
- // Compute class state of the content.
- ComputeMotionNFD();
- ComputeSpatial();
-
- // TODO(marpan): Set FEC adjustment factor.
-
- // Keep track of previous values of network state:
- // adjustment may be also based on pattern of changes in network state.
- prev_total_rate_ = total_rate;
- prev_rtt_time_ = rtt_time;
- prev_packet_loss_ = packet_loss;
- prev_code_rate_delta_ = code_rate_delta;
- return adjust_fec;
-}
-
-// Set the UEP (unequal-protection across packets) on/off for the FEC.
-bool VCMQmRobustness::SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type) {
- // Default.
- return false;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select.h b/chromium/third_party/webrtc/modules/video_coding/qm_select.h
deleted file mode 100644
index 764b5ed8e37..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select.h
+++ /dev/null
@@ -1,356 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/typedefs.h"
-
-/******************************************************/
-/* Quality Modes: Resolution and Robustness settings */
-/******************************************************/
-
-namespace webrtc {
-struct VideoContentMetrics;
-
-struct VCMResolutionScale {
- VCMResolutionScale()
- : codec_width(640),
- codec_height(480),
- frame_rate(30.0f),
- spatial_width_fact(1.0f),
- spatial_height_fact(1.0f),
- temporal_fact(1.0f),
- change_resolution_spatial(false),
- change_resolution_temporal(false) {}
- uint16_t codec_width;
- uint16_t codec_height;
- float frame_rate;
- float spatial_width_fact;
- float spatial_height_fact;
- float temporal_fact;
- bool change_resolution_spatial;
- bool change_resolution_temporal;
-};
-
-enum ImageType {
- kQCIF = 0, // 176x144
- kHCIF, // 264x216 = half(~3/4x3/4) CIF.
- kQVGA, // 320x240 = quarter VGA.
- kCIF, // 352x288
- kHVGA, // 480x360 = half(~3/4x3/4) VGA.
- kVGA, // 640x480
- kQFULLHD, // 960x540 = quarter FULLHD, and half(~3/4x3/4) WHD.
- kWHD, // 1280x720
- kFULLHD, // 1920x1080
- kNumImageTypes
-};
-
-const uint32_t kSizeOfImageType[kNumImageTypes] = {
- 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600};
-
-enum FrameRateLevelClass {
- kFrameRateLow,
- kFrameRateMiddle1,
- kFrameRateMiddle2,
- kFrameRateHigh
-};
-
-enum ContentLevelClass { kLow, kHigh, kDefault };
-
-struct VCMContFeature {
- VCMContFeature() : value(0.0f), level(kDefault) {}
- void Reset() {
- value = 0.0f;
- level = kDefault;
- }
- float value;
- ContentLevelClass level;
-};
-
-enum UpDownAction { kUpResolution, kDownResolution };
-
-enum SpatialAction {
- kNoChangeSpatial,
- kOneHalfSpatialUniform, // 3/4 x 3/4: 9/6 ~1/2 pixel reduction.
- kOneQuarterSpatialUniform, // 1/2 x 1/2: 1/4 pixel reduction.
- kNumModesSpatial
-};
-
-enum TemporalAction {
- kNoChangeTemporal,
- kTwoThirdsTemporal, // 2/3 frame rate reduction
- kOneHalfTemporal, // 1/2 frame rate reduction
- kNumModesTemporal
-};
-
-struct ResolutionAction {
- ResolutionAction() : spatial(kNoChangeSpatial), temporal(kNoChangeTemporal) {}
- SpatialAction spatial;
- TemporalAction temporal;
-};
-
-// Down-sampling factors for spatial (width and height), and temporal.
-const float kFactorWidthSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
-
-const float kFactorHeightSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
-
-const float kFactorTemporal[kNumModesTemporal] = {1.0f, 1.5f, 2.0f};
-
-enum EncoderState {
- kStableEncoding, // Low rate mis-match, stable buffer levels.
- kStressedEncoding, // Significant over-shooting of target rate,
- // Buffer under-flow, etc.
- kEasyEncoding // Significant under-shooting of target rate.
-};
-
-// QmMethod class: main class for resolution and robustness settings
-
-class VCMQmMethod {
- public:
- VCMQmMethod();
- virtual ~VCMQmMethod();
-
- // Reset values
- void ResetQM();
- virtual void Reset() = 0;
-
- // Compute content class.
- uint8_t ComputeContentClass();
-
- // Update with the content metrics.
- void UpdateContent(const VideoContentMetrics* content_metrics);
-
- // Compute spatial texture magnitude and level.
- // Spatial texture is a spatial prediction error measure.
- void ComputeSpatial();
-
- // Compute motion magnitude and level for NFD metric.
- // NFD is normalized frame difference (normalized by spatial variance).
- void ComputeMotionNFD();
-
- // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
- ImageType GetImageType(uint16_t width, uint16_t height);
-
- // Return the closest image type.
- ImageType FindClosestImageType(uint16_t width, uint16_t height);
-
- // Get the frame rate level.
- FrameRateLevelClass FrameRateLevel(float frame_rate);
-
- protected:
- // Content Data.
- const VideoContentMetrics* content_metrics_;
-
- // Encoder frame sizes and native frame sizes.
- uint16_t width_;
- uint16_t height_;
- float user_frame_rate_;
- uint16_t native_width_;
- uint16_t native_height_;
- float native_frame_rate_;
- float aspect_ratio_;
- // Image type and frame rate leve, for the current encoder resolution.
- ImageType image_type_;
- FrameRateLevelClass framerate_level_;
- // Content class data.
- VCMContFeature motion_;
- VCMContFeature spatial_;
- uint8_t content_class_;
- bool init_;
-};
-
-// Resolution settings class
-
-class VCMQmResolution : public VCMQmMethod {
- public:
- VCMQmResolution();
- virtual ~VCMQmResolution();
-
- // Reset all quantities.
- virtual void Reset();
-
- // Reset rate quantities and counters after every SelectResolution() call.
- void ResetRates();
-
- // Reset down-sampling state.
- void ResetDownSamplingState();
-
- // Get the encoder state.
- EncoderState GetEncoderState();
-
- // Initialize after SetEncodingData in media_opt.
- int Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers);
-
- // Update the encoder frame size.
- void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
-
- // Update with actual bit rate (size of the latest encoded frame)
- // and frame type, after every encoded frame.
- void UpdateEncodedSize(size_t encoded_size);
-
- // Update with new target bitrate, actual encoder sent rate, frame_rate,
- // loss rate: every ~1 sec from SetTargetRates in media_opt.
- void UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss);
-
- // Extract ST (spatio-temporal) resolution action.
- // Inputs: qm: Reference to the quality modes pointer.
- // Output: the spatial and/or temporal scale change.
- int SelectResolution(VCMResolutionScale** qm);
-
- private:
- // Set the default resolution action.
- void SetDefaultAction();
-
- // Compute rates for the selection of down-sampling action.
- void ComputeRatesForSelection();
-
- // Compute the encoder state.
- void ComputeEncoderState();
-
- // Return true if the action is to go back up in resolution.
- bool GoingUpResolution();
-
- // Return true if the action is to go down in resolution.
- bool GoingDownResolution();
-
- // Check the condition for going up in resolution by the scale factors:
- // |facWidth|, |facHeight|, |facTemp|.
- // |scaleFac| is a scale factor for the transition rate.
- bool ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Get the bitrate threshold for the resolution action.
- // The case |facWidth|=|facHeight|=|facTemp|==1 is for down-sampling action.
- // |scaleFac| is a scale factor for the transition rate.
- float GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Update the down-sampling state.
- void UpdateDownsamplingState(UpDownAction up_down);
-
- // Update the codec frame size and frame rate.
- void UpdateCodecResolution();
-
- // Return a state based on average target rate relative transition rate.
- uint8_t RateClass(float transition_rate);
-
- // Adjust the action selected from the table.
- void AdjustAction();
-
- // Covert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
- void ConvertSpatialFractionalToWhole();
-
- // Returns true if the new frame sizes, under the selected spatial action,
- // are of even size.
- bool EvenFrameSize();
-
- // Insert latest down-sampling action into the history list.
- void InsertLatestDownAction();
-
- // Remove the last (first element) down-sampling action from the list.
- void RemoveLastDownAction();
-
- // Check constraints on the amount of down-sampling allowed.
- void ConstrainAmountOfDownSampling();
-
- // For going up in resolution: pick spatial or temporal action,
- // if both actions were separately selected.
- void PickSpatialOrTemporal();
-
- // Select the directional (1x2 or 2x1) spatial down-sampling action.
- void SelectSpatialDirectionMode(float transition_rate);
-
- enum { kDownActionHistorySize = 10 };
-
- VCMResolutionScale* qm_;
- // Encoder rate control parameters.
- float target_bitrate_;
- float incoming_framerate_;
- float per_frame_bandwidth_;
- float buffer_level_;
-
- // Data accumulated every ~1sec from MediaOpt.
- float sum_target_rate_;
- float sum_incoming_framerate_;
- float sum_rate_MM_;
- float sum_rate_MM_sgn_;
- float sum_packet_loss_;
- // Counters.
- uint32_t frame_cnt_;
- uint32_t frame_cnt_delta_;
- uint32_t update_rate_cnt_;
- uint32_t low_buffer_cnt_;
-
- // Resolution state parameters.
- float state_dec_factor_spatial_;
- float state_dec_factor_temporal_;
-
- // Quantities used for selection.
- float avg_target_rate_;
- float avg_incoming_framerate_;
- float avg_ratio_buffer_low_;
- float avg_rate_mismatch_;
- float avg_rate_mismatch_sgn_;
- float avg_packet_loss_;
- EncoderState encoder_state_;
- ResolutionAction action_;
- // Short history of the down-sampling actions from the Initialize() state.
- // This is needed for going up in resolution. Since the total amount of
- // down-sampling actions are constrained, the length of the list need not be
- // large: i.e., (4/3) ^{kDownActionHistorySize} <= kMaxDownSample.
- ResolutionAction down_action_history_[kDownActionHistorySize];
- int num_layers_;
-};
-
-// Robustness settings class.
-
-class VCMQmRobustness : public VCMQmMethod {
- public:
- VCMQmRobustness();
- ~VCMQmRobustness();
-
- virtual void Reset();
-
- // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
- // Returns an adjustment factor.
- float AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss);
-
- // Set the UEP protection on/off.
- bool SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type);
-
- private:
- // Previous state of network parameters.
- float prev_total_rate_;
- int64_t prev_rtt_time_;
- uint8_t prev_packet_loss_;
- uint8_t prev_code_rate_delta_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h b/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h
deleted file mode 100644
index 49190ef53b9..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
-#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
-
-/***************************************************************
-*QMSelectData.h
-* This file includes parameters for content-aware media optimization
-****************************************************************/
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-//
-// PARAMETERS FOR RESOLUTION ADAPTATION
-//
-
-// Initial level of buffer in secs.
-const float kInitBufferLevel = 0.5f;
-
-// Threshold of (max) buffer size below which we consider too low (underflow).
-const float kPercBufferThr = 0.10f;
-
-// Threshold on the occurrences of low buffer levels.
-const float kMaxBufferLow = 0.30f;
-
-// Threshold on rate mismatch.
-const float kMaxRateMisMatch = 0.5f;
-
-// Threshold on amount of under/over encoder shooting.
-const float kRateOverShoot = 0.75f;
-const float kRateUnderShoot = 0.75f;
-
-// Factor to favor weighting the average rates with the current/last data.
-const float kWeightRate = 0.70f;
-
-// Factor for transitional rate for going back up in resolution.
-const float kTransRateScaleUpSpatial = 1.25f;
-const float kTransRateScaleUpTemp = 1.25f;
-const float kTransRateScaleUpSpatialTemp = 1.25f;
-
-// Threshold on packet loss rate, above which favor resolution reduction.
-const float kPacketLossThr = 0.1f;
-
-// Factor for reducing transitional bitrate under packet loss.
-const float kPacketLossRateFac = 1.0f;
-
-// Maximum possible transitional rate for down-sampling:
-// (units in kbps), for 30fps.
-const uint16_t kMaxRateQm[9] = {
- 0, // QCIF
- 50, // kHCIF
- 125, // kQVGA
- 200, // CIF
- 280, // HVGA
- 400, // VGA
- 700, // QFULLHD
- 1000, // WHD
- 1500 // FULLHD
-};
-
-// Frame rate scale for maximum transition rate.
-const float kFrameRateFac[4] = {
- 0.5f, // Low
- 0.7f, // Middle level 1
- 0.85f, // Middle level 2
- 1.0f, // High
-};
-
-// Scale for transitional rate: based on content class
-// motion=L/H/D,spatial==L/H/D: for low, high, middle levels
-const float kScaleTransRateQm[18] = {
- // VGA and lower
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H ,L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, D
- 0.50f, // D, H
-
- // over VGA
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H ,L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, D
- 0.50f, // D, H
-};
-
-// Threshold on the target rate relative to transitional rate.
-const float kFacLowRate = 0.5f;
-
-// Action for down-sampling:
-// motion=L/H/D,spatial==L/H/D, for low, high, middle levels;
-// rate = 0/1/2, for target rate state relative to transition rate.
-const uint8_t kSpatialAction[27] = {
- // rateClass = 0:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 4, // H ,L
- 1, // H, H
- 4, // H, D
- 4, // D, L
- 1, // D, H
- 2, // D, D
-
- // rateClass = 1:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H ,L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-
- // rateClass = 2:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H ,L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-};
-
-const uint8_t kTemporalAction[27] = {
- // rateClass = 0:
- 3, // L, L
- 2, // L, H
- 2, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 2, // D, H
- 1, // D, D
-
- // rateClass = 1:
- 3, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-
- // rateClass = 2:
- 1, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-};
-
-// Control the total amount of down-sampling allowed.
-const float kMaxSpatialDown = 8.0f;
-const float kMaxTempDown = 3.0f;
-const float kMaxTotalDown = 9.0f;
-
-// Minimum image size for a spatial down-sampling.
-const int kMinImageSize = 176 * 144;
-
-// Minimum frame rate for temporal down-sampling:
-// no frame rate reduction if incomingFrameRate <= MIN_FRAME_RATE.
-const int kMinFrameRate = 8;
-
-//
-// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
-//
-
-//
-// PARAMETETS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
-//
-
-// Thresholds for frame rate:
-const int kLowFrameRate = 10;
-const int kMiddleFrameRate = 15;
-const int kHighFrameRate = 25;
-
-// Thresholds for motion: motion level is from NFD.
-const float kHighMotionNfd = 0.075f;
-const float kLowMotionNfd = 0.03f;
-
-// Thresholds for spatial prediction error:
-// this is applied on the average of (2x2,1x2,2x1).
-const float kHighTexture = 0.035f;
-const float kLowTexture = 0.020f;
-
-// Used to reduce thresholds for larger/HD scenes: correction factor since
-// higher correlation in HD scenes means lower spatial prediction error.
-const float kScaleTexture = 0.9f;
-
-// Percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1.
-const float kRateRedSpatial2X2 = 0.6f;
-
-const float kSpatialErr2x2VsHoriz = 0.1f; // percentage to favor 2x2 over H
-const float kSpatialErr2X2VsVert = 0.1f; // percentage to favor 2x2 over V
-const float kSpatialErrVertVsHoriz = 0.1f; // percentage to favor H over V
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc
deleted file mode 100644
index f8542ec6763..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc
+++ /dev/null
@@ -1,1307 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests the QmResolution class
- * In particular, for the selection of spatial and/or temporal down-sampling.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/qm_select.h"
-
-namespace webrtc {
-
-// Representative values of content metrics for: low/high/medium(default) state,
-// based on parameters settings in qm_select_data.h.
-const float kSpatialLow = 0.01f;
-const float kSpatialMedium = 0.03f;
-const float kSpatialHigh = 0.1f;
-const float kTemporalLow = 0.01f;
-const float kTemporalMedium = 0.06f;
-const float kTemporalHigh = 0.1f;
-
-class QmSelectTest : public ::testing::Test {
- protected:
- QmSelectTest()
- : qm_resolution_(new VCMQmResolution()),
- content_metrics_(new VideoContentMetrics()),
- qm_scale_(NULL) {}
- VCMQmResolution* qm_resolution_;
- VideoContentMetrics* content_metrics_;
- VCMResolutionScale* qm_scale_;
-
- void InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers);
-
- void UpdateQmEncodedFrame(size_t* encoded_size, size_t num_updates);
-
- void UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates);
-
- void UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert);
-
- bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate);
-
- void TearDown() {
- delete qm_resolution_;
- delete content_metrics_;
- }
-};
-
-TEST_F(QmSelectTest, HandleInputs) {
- // Expect parameter error. Initialize with invalid inputs.
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480, 1));
-
- // Expect uninitialized error.: No valid initialization before selection.
- EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
-
- VideoContentMetrics* content_metrics = NULL;
- EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480, 1));
- qm_resolution_->UpdateContent(content_metrics);
- // Content metrics are NULL: Expect success and no down-sampling action.
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480, 30.0f));
-}
-
-// TODO(marpan): Add a test for number of temporal layers > 1.
-
-// No down-sampling action at high rates.
-TEST_F(QmSelectTest, NoActionHighRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(800, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {800, 800, 800};
- int encoder_sent_rate[] = {800, 800, 800};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Rate is well below transition, down-sampling action is taken,
-// depending on the content state.
-TEST_F(QmSelectTest, DownActionLowRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, low spatial: 2/3 temporal is expected.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, high spatial: 2/3 temporal expected.
- UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, medium spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- // Target frame rate for frame dropper should be the same as previous == 15.
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, medium spatial: high frame rate, so 2/3 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Rate mis-match is high, and we have over-shooting.
-// since target rate is below max for down-sampling, down-sampling is selected.
-TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {900, 900, 900};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Rate mis-match is high, target rate is below max for down-sampling,
-// but since we have consistent under-shooting, no down-sampling action.
-TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Buffer is underflowing, and target rate is below max for down-sampling,
-// so action is taken.
-TEST_F(QmSelectTest, DownActionBufferUnderflow) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow:
- size_t encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {450, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Target rate is below max for down-sampling, but buffer level is stable,
-// so no action is taken.
-TEST_F(QmSelectTest, NoActionBufferStable) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(350, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
- size_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {350, 350, 350};
- int encoder_sent_rate[] = {350, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Very low rate, but no spatial down-sampling below some size (QCIF).
-TEST_F(QmSelectTest, LimitDownSpatialAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 30, 176, 144, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 176;
- uint16_t codec_height = 144;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144, 30.0f));
-}
-
-// Very low rate, but no frame reduction below some frame_rate (8fps).
-TEST_F(QmSelectTest, LimitDownTemporalAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 8, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {8, 8, 8};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, medium spatial.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 8.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially,
-// as rate as increased.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset and go up in rate: expected to go back up, in 2 stages of 3/4.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset rates and simulate under-shooting scenario.: expect to go back up.
- // Goes up spatially in two stages for 1/2x1/2 down-sampling.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {200, 200, 200, 200, 200};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240, 30.0f));
-}
-
-// Two stages: temporally down-sample and then back up temporally,
-// as rate as increased.
-TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- // Reset rates and go up in rate: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
-}
-
-// Two stages: temporal down-sample and then back up temporally, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporalUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- // Reset rates and simulate under-shooting scenario.: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
-}
-
-// Two stages: temporal down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2, 640, 480, 15.5f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {600, 600, 600, 600, 600};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 15.0f));
-}
-// 3 stages: spatial down-sample, followed by temporal down-sample,
-// and then go up to full state, as encoding rate has increased.
-TEST_F(QmSelectTest, 3StageDownSpatialTemporlaUpSpatialTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Change content data: expect temporal down-sample.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
-
- // Reset rates and go lower in rate.
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Reset rates and go high up in rate: expect to go back up both spatial
- // and temporally. The 1/2x1/2 spatial is undone in two stages.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// No down-sampling below some total amount.
-TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 1280, 720, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 1280;
- uint16_t codec_height = 720;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360, 30.0f));
-
- // Reset and lower rates to get another spatial action (3/4x3/4).
- // Lower the frame rate for spatial to be selected again.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 640, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {70, 70, 70, 70, 70};
- int encoder_sent_rate2[] = {70, 70, 70, 70, 70};
- int incoming_frame_rate2[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, medium spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 270, 10.0f));
-
- // Reset and go to very low rate: no action should be taken,
- // we went down too much already.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 480, 270);
- EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
- // Update rates for a sequence of intervals.
- int target_rate3[] = {10, 10, 10, 10, 10};
- int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
- int incoming_frame_rate3[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270, 10.0f));
-}
-
-// Multiple down-sampling stages and then undo all of them.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// followed by spatial 3/4x3/4. Then go up to full state,
-// as encoding rate has increased.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory1) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down spatial 3/4x3/4.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
-
- // Go down 3/4x3/4 spatial:
- qm_resolution_->UpdateCodecParameters(20.0f, 480, 360);
- qm_resolution_->ResetRates();
- int target_rate3[] = {80, 80, 80, 80, 80};
- int encoder_sent_rate3[] = {80, 80, 80, 80, 80};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- // The two spatial actions of 3/4x3/4 are converted to 1/2x1/2,
- // so scale factor is 2.0.
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 20.0f));
-
- // Reset rates and go high up in rate: expect to go up:
- // 1/2x1x2 spatial and 1/2 temporally.
-
- // Go up 1/2x1/2 spatially and 1/2 temporally. Spatial is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(15.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate4[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 1/2x1/2, followed by temporal down-sample 2/3, undo the
-// temporal, then another temporal, and then undo both spatial and temporal.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory2) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 1/2x1/2 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, high spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Go up 2/3 temporally.
- qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate3[] = {150, 150, 150, 150, 150};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 320,
- 240, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate4[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate4[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Go up spatial and temporal. Spatial undoing is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(20.5f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate5[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost5[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate5, encoder_sent_rate5, incoming_frame_rate5,
- fraction_lost5, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// undo the temporal 2/3, and then undo the spatial.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory3) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(100, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {100, 100, 100};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
-
- // Go up 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(20.5f, 480, 360);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {250, 250, 250, 250, 250};
- int encoder_sent_rate3[] = {250, 250, 250, 250, 250};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 120};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- // Go up spatial.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate4[] = {500, 500, 500, 500, 500};
- int encoder_sent_rate4[] = {500, 500, 500, 500, 500};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
-TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- // Set rates to go down another 3/4 spatial. Should be converted ton 1/2.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-}
-
-void QmSelectTest::InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers) {
- EXPECT_EQ(
- 0, qm_resolution_->Initialize(initial_bit_rate, user_frame_rate,
- native_width, native_height, num_layers));
-}
-
-void QmSelectTest::UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert) {
- content_metrics_->motion_magnitude = motion_metric;
- content_metrics_->spatial_pred_err = spatial_metric;
- content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
- content_metrics_->spatial_pred_err_v = spatial_metric_vert;
- qm_resolution_->UpdateContent(content_metrics_);
-}
-
-void QmSelectTest::UpdateQmEncodedFrame(size_t* encoded_size,
- size_t num_updates) {
- for (size_t i = 0; i < num_updates; ++i) {
- // Convert to bytes.
- size_t encoded_size_update = 1000 * encoded_size[i] / 8;
- qm_resolution_->UpdateEncodedSize(encoded_size_update);
- }
-}
-
-void QmSelectTest::UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates) {
- for (int i = 0; i < num_updates; ++i) {
- float target_rate_update = target_rate[i];
- float encoder_sent_rate_update = encoder_sent_rate[i];
- float incoming_frame_rate_update = incoming_frame_rate[i];
- uint8_t fraction_lost_update = fraction_lost[i];
- qm_resolution_->UpdateRates(target_rate_update, encoder_sent_rate_update,
- incoming_frame_rate_update,
- fraction_lost_update);
- }
-}
-
-// Check is the selected action from the QmResolution class is the same
-// as the expected scales from |fac_width|, |fac_height|, |fac_temp|.
-bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate) {
- if (qm_scale->spatial_width_fact == fac_width &&
- qm_scale->spatial_height_fact == fac_height &&
- qm_scale->temporal_fact == fac_temp &&
- qm_scale->codec_width == new_width &&
- qm_scale->codec_height == new_height &&
- qm_scale->frame_rate == new_frame_rate) {
- return true;
- } else {
- return false;
- }
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver.cc b/chromium/third_party/webrtc/modules/video_coding/receiver.cc
index a02fd01de6a..1954df94e73 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver.cc
@@ -42,12 +42,17 @@ VCMReceiver::VCMReceiver(VCMTiming* timing,
EventFactory* event_factory,
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender)
- : VCMReceiver(timing,
- clock,
- std::unique_ptr<EventWrapper>(event_factory->CreateEvent()),
- std::unique_ptr<EventWrapper>(event_factory->CreateEvent()),
- nack_sender,
- keyframe_request_sender) {}
+ : VCMReceiver(
+ timing,
+ clock,
+ std::unique_ptr<EventWrapper>(event_factory
+ ? event_factory->CreateEvent()
+ : EventWrapper::Create()),
+ std::unique_ptr<EventWrapper>(event_factory
+ ? event_factory->CreateEvent()
+ : EventWrapper::Create()),
+ nack_sender,
+ keyframe_request_sender) {}
VCMReceiver::VCMReceiver(VCMTiming* timing,
Clock* clock,
@@ -281,24 +286,6 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
return 0;
}
-int VCMReceiver::RenderBufferSizeMs() {
- uint32_t timestamp_start = 0u;
- uint32_t timestamp_end = 0u;
- // Render timestamps are computed just prior to decoding. Therefore this is
- // only an estimate based on frames' timestamps and current timing state.
- jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
- if (timestamp_start == timestamp_end) {
- return 0;
- }
- // Update timing.
- const int64_t now_ms = clock_->TimeInMilliseconds();
- timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
- // Get render timestamps.
- uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
- uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
- return render_end - render_start;
-}
-
void VCMReceiver::RegisterStatsCallback(
VCMReceiveStatisticsCallback* callback) {
jitter_buffer_.RegisterStatsCallback(callback);
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver.h b/chromium/third_party/webrtc/modules/video_coding/receiver.h
index a4c55e967cb..dbef62a716c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver.h
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver.h
@@ -90,11 +90,6 @@ class VCMReceiver {
void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
VCMDecodeErrorMode DecodeErrorMode() const;
- // Returns size in time (milliseconds) of complete continuous frames in the
- // jitter buffer. The render time is estimated based on the render delay at
- // the time this function is called.
- int RenderBufferSizeMs();
-
void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
void TriggerDecoderShutdown();
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
index 42cc9ac0a88..d05957f6f0d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
@@ -92,66 +92,6 @@ class TestVCMReceiver : public ::testing::Test {
std::unique_ptr<StreamGenerator> stream_generator_;
};
-TEST_F(TestVCMReceiver, RenderBufferSize_AllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(num_of_frames * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_SkipToKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- const int kNumOfNonDecodableFrames = 2;
- for (int i = 0; i < kNumOfNonDecodableFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- const int kNumOfFrames = 10;
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- for (int i = 0; i < kNumOfFrames - 1; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((kNumOfFrames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NotAllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- num_of_frames++;
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((num_of_frames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- int64_t next_render_time_ms = 0;
- VCMEncodedFrame* frame =
- receiver_.FrameForDecoding(10, &next_render_time_ms, false);
- EXPECT_TRUE(frame == NULL);
- receiver_.ReleaseFrame(frame);
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
-}
-
TEST_F(TestVCMReceiver, NonDecodableDuration_Empty) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
new file mode 100644
index 00000000000..2ddfada74e5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
@@ -0,0 +1,486 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/rtp_frame_reference_finder.h"
+
+#include <algorithm>
+#include <limits>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/packet_buffer.h"
+
+namespace webrtc {
+namespace video_coding {
+
+RtpFrameReferenceFinder::RtpFrameReferenceFinder(
+ OnCompleteFrameCallback* frame_callback)
+ : last_picture_id_(-1),
+ last_unwrap_(-1),
+ current_ss_idx_(0),
+ frame_callback_(frame_callback) {}
+
+void RtpFrameReferenceFinder::ManageFrame(
+ std::unique_ptr<RtpFrameObject> frame) {
+ rtc::CritScope lock(&crit_);
+ switch (frame->codec_type()) {
+ case kVideoCodecULPFEC:
+ case kVideoCodecRED:
+ case kVideoCodecUnknown:
+ RTC_NOTREACHED();
+ break;
+ case kVideoCodecVP8:
+ ManageFrameVp8(std::move(frame));
+ break;
+ case kVideoCodecVP9:
+ ManageFrameVp9(std::move(frame));
+ break;
+ case kVideoCodecH264:
+ case kVideoCodecI420:
+ case kVideoCodecGeneric:
+ ManageFrameGeneric(std::move(frame));
+ break;
+ }
+}
+
+void RtpFrameReferenceFinder::RetryStashedFrames() {
+ size_t num_stashed_frames = stashed_frames_.size();
+
+ // Clean up stashed frames if there are too many.
+ while (stashed_frames_.size() > kMaxStashedFrames)
+ stashed_frames_.pop();
+
+ // Since frames are stashed if there is not enough data to determine their
+ // frame references we should at most check |stashed_frames_.size()| in
+ // order to not pop and push frames in and endless loop.
+ for (size_t i = 0; i < num_stashed_frames && !stashed_frames_.empty(); ++i) {
+ std::unique_ptr<RtpFrameObject> frame = std::move(stashed_frames_.front());
+ stashed_frames_.pop();
+ ManageFrame(std::move(frame));
+ }
+}
+
+void RtpFrameReferenceFinder::ManageFrameGeneric(
+ std::unique_ptr<RtpFrameObject> frame) {
+ if (frame->frame_type() == kVideoFrameKey)
+ last_seq_num_gop_[frame->last_seq_num()] = frame->last_seq_num();
+
+ // We have received a frame but not yet a keyframe, stash this frame.
+ if (last_seq_num_gop_.empty()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ // Clean up info for old keyframes but make sure to keep info
+ // for the last keyframe.
+ auto clean_to = last_seq_num_gop_.lower_bound(frame->last_seq_num() - 100);
+ if (clean_to != last_seq_num_gop_.end())
+ last_seq_num_gop_.erase(last_seq_num_gop_.begin(), clean_to);
+
+ // Find the last sequence number of the last frame for the keyframe
+ // that this frame indirectly references.
+ auto seq_num_it = last_seq_num_gop_.upper_bound(frame->last_seq_num());
+ seq_num_it--;
+
+ // Make sure the packet sequence numbers are continuous, otherwise stash
+ // this frame.
+ if (frame->frame_type() == kVideoFrameDelta) {
+ if (seq_num_it->second !=
+ static_cast<uint16_t>(frame->first_seq_num() - 1)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+ }
+
+ RTC_DCHECK(AheadOrAt(frame->last_seq_num(), seq_num_it->first));
+
+ // Since keyframes can cause reordering we can't simply assign the
+ // picture id according to some incrementing counter.
+ frame->picture_id = frame->last_seq_num();
+ frame->num_references = frame->frame_type() == kVideoFrameDelta;
+ frame->references[0] = seq_num_it->second;
+ seq_num_it->second = frame->picture_id;
+
+ last_picture_id_ = frame->picture_id;
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+void RtpFrameReferenceFinder::ManageFrameVp8(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP8& codec_header = rtp_codec_header->VP8;
+
+ if (codec_header.pictureId == kNoPictureId ||
+ codec_header.temporalIdx == kNoTemporalIdx ||
+ codec_header.tl0PicIdx == kNoTl0PicIdx) {
+ ManageFrameGeneric(std::move(frame));
+ return;
+ }
+
+ frame->picture_id = codec_header.pictureId % kPicIdLength;
+
+ if (last_unwrap_ == -1)
+ last_unwrap_ = codec_header.pictureId;
+
+ if (last_picture_id_ == -1)
+ last_picture_id_ = frame->picture_id;
+
+ // Find if there has been a gap in fully received frames and save the picture
+ // id of those frames in |not_yet_received_frames_|.
+ if (AheadOf<uint16_t, kPicIdLength>(frame->picture_id, last_picture_id_)) {
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ while (last_picture_id_ != frame->picture_id) {
+ not_yet_received_frames_.insert(last_picture_id_);
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ }
+ }
+
+ // Clean up info for base layers that are too old.
+ uint8_t old_tl0_pic_idx = codec_header.tl0PicIdx - kMaxLayerInfo;
+ auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
+ layer_info_.erase(layer_info_.begin(), clean_layer_info_to);
+
+ // Clean up info about not yet received frames that are too old.
+ uint16_t old_picture_id =
+ Subtract<kPicIdLength>(frame->picture_id, kMaxNotYetReceivedFrames);
+ auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id);
+ not_yet_received_frames_.erase(not_yet_received_frames_.begin(),
+ clean_frames_to);
+
+ if (frame->frame_type() == kVideoFrameKey) {
+ frame->num_references = 0;
+ layer_info_[codec_header.tl0PicIdx].fill(-1);
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ auto layer_info_it = layer_info_.find(codec_header.temporalIdx == 0
+ ? codec_header.tl0PicIdx - 1
+ : codec_header.tl0PicIdx);
+
+ // If we don't have the base layer frame yet, stash this frame.
+ if (layer_info_it == layer_info_.end()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ // A non keyframe base layer frame has been received, copy the layer info
+ // from the previous base layer frame and set a reference to the previous
+ // base layer frame.
+ if (codec_header.temporalIdx == 0) {
+ layer_info_it =
+ layer_info_
+ .insert(make_pair(codec_header.tl0PicIdx, layer_info_it->second))
+ .first;
+ frame->num_references = 1;
+ frame->references[0] = layer_info_it->second[0];
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ // Layer sync frame, this frame only references its base layer frame.
+ if (codec_header.layerSync) {
+ frame->num_references = 1;
+ frame->references[0] = layer_info_it->second[0];
+
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ // Find all references for this frame.
+ frame->num_references = 0;
+ for (uint8_t layer = 0; layer <= codec_header.temporalIdx; ++layer) {
+ RTC_DCHECK_NE(-1, layer_info_it->second[layer]);
+
+ // If we have not yet received a frame between this frame and the referenced
+ // frame then we have to wait for that frame to be completed first.
+ auto not_received_frame_it =
+ not_yet_received_frames_.upper_bound(layer_info_it->second[layer]);
+ if (not_received_frame_it != not_yet_received_frames_.end() &&
+ AheadOf<uint16_t, kPicIdLength>(frame->picture_id,
+ *not_received_frame_it)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ ++frame->num_references;
+ frame->references[layer] = layer_info_it->second[layer];
+ }
+
+ CompletedFrameVp8(std::move(frame));
+}
+
+void RtpFrameReferenceFinder::CompletedFrameVp8(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP8& codec_header = rtp_codec_header->VP8;
+
+ uint8_t tl0_pic_idx = codec_header.tl0PicIdx;
+ uint8_t temporal_index = codec_header.temporalIdx;
+ auto layer_info_it = layer_info_.find(tl0_pic_idx);
+
+ // Update this layer info and newer.
+ while (layer_info_it != layer_info_.end()) {
+ if (layer_info_it->second[temporal_index] != -1 &&
+ AheadOf<uint16_t, kPicIdLength>(layer_info_it->second[temporal_index],
+ frame->picture_id)) {
+ // The frame was not newer, then no subsequent layer info have to be
+ // update.
+ break;
+ }
+
+ layer_info_it->second[codec_header.temporalIdx] = frame->picture_id;
+ ++tl0_pic_idx;
+ layer_info_it = layer_info_.find(tl0_pic_idx);
+ }
+ not_yet_received_frames_.erase(frame->picture_id);
+
+ for (size_t i = 0; i < frame->num_references; ++i)
+ frame->references[i] = UnwrapPictureId(frame->references[i]);
+ frame->picture_id = UnwrapPictureId(frame->picture_id);
+
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+void RtpFrameReferenceFinder::ManageFrameVp9(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP9& codec_header = rtp_codec_header->VP9;
+
+ if (codec_header.picture_id == kNoPictureId) {
+ ManageFrameGeneric(std::move(frame));
+ return;
+ }
+
+ frame->spatial_layer = codec_header.spatial_idx;
+ frame->inter_layer_predicted = codec_header.inter_layer_predicted;
+ frame->picture_id = codec_header.picture_id % kPicIdLength;
+
+ if (last_unwrap_ == -1)
+ last_unwrap_ = codec_header.picture_id;
+
+ if (last_picture_id_ == -1)
+ last_picture_id_ = frame->picture_id;
+
+ if (codec_header.flexible_mode) {
+ frame->num_references = codec_header.num_ref_pics;
+ for (size_t i = 0; i < frame->num_references; ++i) {
+ frame->references[i] =
+ Subtract<1 << 16>(frame->picture_id, codec_header.pid_diff[i]);
+ }
+
+ CompletedFrameVp9(std::move(frame));
+ return;
+ }
+
+ if (codec_header.ss_data_available) {
+ // Scalability structures can only be sent with tl0 frames.
+ if (codec_header.temporal_idx != 0) {
+ LOG(LS_WARNING) << "Received scalability structure on a non base layer"
+ " frame. Scalability structure ignored.";
+ } else {
+ current_ss_idx_ = Add<kMaxGofSaved>(current_ss_idx_, 1);
+ scalability_structures_[current_ss_idx_] = codec_header.gof;
+ scalability_structures_[current_ss_idx_].pid_start = frame->picture_id;
+
+ auto pid_and_gof = std::make_pair(
+ frame->picture_id, &scalability_structures_[current_ss_idx_]);
+ gof_info_.insert(std::make_pair(codec_header.tl0_pic_idx, pid_and_gof));
+ }
+ }
+
+ // Clean up info for base layers that are too old.
+ uint8_t old_tl0_pic_idx = codec_header.tl0_pic_idx - kMaxGofSaved;
+ auto clean_gof_info_to = gof_info_.lower_bound(old_tl0_pic_idx);
+ gof_info_.erase(gof_info_.begin(), clean_gof_info_to);
+
+ if (frame->frame_type() == kVideoFrameKey) {
+ // When using GOF all keyframes must include the scalability structure.
+ if (!codec_header.ss_data_available)
+ LOG(LS_WARNING) << "Received keyframe without scalability structure";
+
+ frame->num_references = 0;
+ GofInfoVP9* gof = gof_info_.find(codec_header.tl0_pic_idx)->second.second;
+ FrameReceivedVp9(frame->picture_id, *gof);
+ CompletedFrameVp9(std::move(frame));
+ return;
+ }
+
+ auto gof_info_it = gof_info_.find(
+ (codec_header.temporal_idx == 0 && !codec_header.ss_data_available)
+ ? codec_header.tl0_pic_idx - 1
+ : codec_header.tl0_pic_idx);
+
+ // Gof info for this frame is not available yet, stash this frame.
+ if (gof_info_it == gof_info_.end()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ GofInfoVP9* gof = gof_info_it->second.second;
+ uint16_t picture_id_tl0 = gof_info_it->second.first;
+
+ FrameReceivedVp9(frame->picture_id, *gof);
+
+ // Make sure we don't miss any frame that could potentially have the
+ // up switch flag set.
+ if (MissingRequiredFrameVp9(frame->picture_id, *gof)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ if (codec_header.temporal_up_switch) {
+ auto pid_tidx =
+ std::make_pair(frame->picture_id, codec_header.temporal_idx);
+ up_switch_.insert(pid_tidx);
+ }
+
+ // If this is a base layer frame that contains a scalability structure
+ // then gof info has already been inserted earlier, so we only want to
+ // insert if we haven't done so already.
+ if (codec_header.temporal_idx == 0 && !codec_header.ss_data_available) {
+ auto pid_and_gof = std::make_pair(frame->picture_id, gof);
+ gof_info_.insert(std::make_pair(codec_header.tl0_pic_idx, pid_and_gof));
+ }
+
+ // Clean out old info about up switch frames.
+ uint16_t old_picture_id = Subtract<kPicIdLength>(last_picture_id_, 50);
+ auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id);
+ up_switch_.erase(up_switch_.begin(), up_switch_erase_to);
+
+ RTC_DCHECK(
+ (AheadOrAt<uint16_t, kPicIdLength>(frame->picture_id, picture_id_tl0)));
+
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof->pid_start, frame->picture_id);
+ size_t gof_idx = diff % gof->num_frames_in_gof;
+
+ // Populate references according to the scalability structure.
+ frame->num_references = gof->num_ref_pics[gof_idx];
+ for (size_t i = 0; i < frame->num_references; ++i) {
+ frame->references[i] =
+ Subtract<kPicIdLength>(frame->picture_id, gof->pid_diff[gof_idx][i]);
+
+ // If this is a reference to a frame earlier than the last up switch point,
+ // then ignore this reference.
+ if (UpSwitchInIntervalVp9(frame->picture_id, codec_header.temporal_idx,
+ frame->references[i])) {
+ --frame->num_references;
+ }
+ }
+
+ CompletedFrameVp9(std::move(frame));
+}
+
+bool RtpFrameReferenceFinder::MissingRequiredFrameVp9(uint16_t picture_id,
+ const GofInfoVP9& gof) {
+ size_t diff = ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, picture_id);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+
+ // For every reference this frame has, check if there is a frame missing in
+ // the interval (|ref_pid|, |picture_id|) in any of the lower temporal
+ // layers. If so, we are missing a required frame.
+ uint8_t num_references = gof.num_ref_pics[gof_idx];
+ for (size_t i = 0; i < num_references; ++i) {
+ uint16_t ref_pid =
+ Subtract<kPicIdLength>(picture_id, gof.pid_diff[gof_idx][i]);
+ for (size_t l = 0; l < temporal_idx; ++l) {
+ auto missing_frame_it = missing_frames_for_layer_[l].lower_bound(ref_pid);
+ if (missing_frame_it != missing_frames_for_layer_[l].end() &&
+ AheadOf<uint16_t, kPicIdLength>(picture_id, *missing_frame_it)) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id,
+ const GofInfoVP9& gof) {
+ RTC_DCHECK_NE(-1, last_picture_id_);
+
+ // If there is a gap, find which temporal layer the missing frames
+ // belong to and add the frame as missing for that temporal layer.
+ // Otherwise, remove this frame from the set of missing frames.
+ if (AheadOf<uint16_t, kPicIdLength>(picture_id, last_picture_id_)) {
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, last_picture_id_);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ while (last_picture_id_ != picture_id) {
+ ++gof_idx;
+ RTC_DCHECK_NE(0ul, gof_idx % gof.num_frames_in_gof);
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+ missing_frames_for_layer_[temporal_idx].insert(last_picture_id_);
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ }
+ } else {
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, picture_id);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+ missing_frames_for_layer_[temporal_idx].erase(picture_id);
+ }
+}
+
+bool RtpFrameReferenceFinder::UpSwitchInIntervalVp9(uint16_t picture_id,
+ uint8_t temporal_idx,
+ uint16_t pid_ref) {
+ for (auto up_switch_it = up_switch_.upper_bound(pid_ref);
+ up_switch_it != up_switch_.end() &&
+ AheadOf<uint16_t, kPicIdLength>(picture_id, up_switch_it->first);
+ ++up_switch_it) {
+ if (up_switch_it->second < temporal_idx)
+ return true;
+ }
+
+ return false;
+}
+
+void RtpFrameReferenceFinder::CompletedFrameVp9(
+ std::unique_ptr<RtpFrameObject> frame) {
+ for (size_t i = 0; i < frame->num_references; ++i)
+ frame->references[i] = UnwrapPictureId(frame->references[i]);
+ frame->picture_id = UnwrapPictureId(frame->picture_id);
+
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+uint16_t RtpFrameReferenceFinder::UnwrapPictureId(uint16_t picture_id) {
+ RTC_DCHECK_NE(-1, last_unwrap_);
+
+ uint16_t unwrap_truncated = last_unwrap_ % kPicIdLength;
+ uint16_t diff = MinDiff<uint16_t, kPicIdLength>(unwrap_truncated, picture_id);
+
+ if (AheadOf<uint16_t, kPicIdLength>(picture_id, unwrap_truncated))
+ last_unwrap_ = Add<1 << 16>(last_unwrap_, diff);
+ else
+ last_unwrap_ = Subtract<1 << 16>(last_unwrap_, diff);
+
+ return last_unwrap_;
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h
new file mode 100644
index 00000000000..7289b803bd0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
+
+#include <array>
+#include <map>
+#include <memory>
+#include <queue>
+#include <set>
+#include <utility>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+
+namespace webrtc {
+namespace video_coding {
+
+class RtpFrameObject;
+class OnCompleteFrameCallback;
+
+class RtpFrameReferenceFinder {
+ public:
+ explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback);
+ void ManageFrame(std::unique_ptr<RtpFrameObject> frame);
+
+ private:
+ static const uint16_t kPicIdLength = 1 << 7;
+ static const uint8_t kMaxTemporalLayers = 5;
+ static const int kMaxLayerInfo = 10;
+ static const int kMaxStashedFrames = 10;
+ static const int kMaxNotYetReceivedFrames = 20;
+ static const int kMaxGofSaved = 15;
+
+ rtc::CriticalSection crit_;
+
+ // Retry finding references for all frames that previously didn't have
+ // all information needed.
+ void RetryStashedFrames() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for generic frames.
+ void ManageFrameGeneric(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for Vp8 frames
+ void ManageFrameVp8(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Updates all necessary state used to determine frame references
+ // for Vp8 and then calls the |frame_callback| callback with the
+ // completed frame.
+ void CompletedFrameVp8(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for Vp9 frames
+ void ManageFrameVp9(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Unwrap the picture id and the frame references and then call the
+ // |frame_callback| callback with the completed frame.
+ void CompletedFrameVp9(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Check if we are missing a frame necessary to determine the references
+ // for this frame.
+ bool MissingRequiredFrameVp9(uint16_t picture_id, const GofInfoVP9& gof)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Updates which frames that have been received. If there is a gap,
+ // missing frames will be added to |missing_frames_for_layer_| or
+ // if this is an already missing frame then it will be removed.
+ void FrameReceivedVp9(uint16_t picture_id, const GofInfoVP9& gof)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Check if there is a frame with the up-switch flag set in the interval
+ // (|pid_ref|, |picture_id|) with temporal layer smaller than |temporal_idx|.
+ bool UpSwitchInIntervalVp9(uint16_t picture_id,
+ uint8_t temporal_idx,
+ uint16_t pid_ref) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // All picture ids are unwrapped to 16 bits.
+ uint16_t UnwrapPictureId(uint16_t picture_id) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Holds the last sequence number of the last frame that has been created
+ // given the last sequence number of a given keyframe.
+ std::map<uint16_t, uint16_t, DescendingSeqNumComp<uint16_t>> last_seq_num_gop_
+ GUARDED_BY(crit_);
+
+ // Save the last picture id in order to detect when there is a gap in frames
+ // that have not yet been fully received.
+ int last_picture_id_ GUARDED_BY(crit_);
+
+ // The last unwrapped picture id. Used to unwrap the picture id from a length
+ // of |kPicIdLength| to 16 bits.
+ int last_unwrap_ GUARDED_BY(crit_);
+
+ // Frames earlier than the last received frame that have not yet been
+ // fully received.
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t, kPicIdLength>>
+ not_yet_received_frames_ GUARDED_BY(crit_);
+
+ // Frames that have been fully received but didn't have all the information
+ // needed to determine their references.
+ std::queue<std::unique_ptr<RtpFrameObject>> stashed_frames_ GUARDED_BY(crit_);
+
+ // Holds the information about the last completed frame for a given temporal
+ // layer given a Tl0 picture index.
+ std::map<uint8_t,
+ std::array<int16_t, kMaxTemporalLayers>,
+ DescendingSeqNumComp<uint8_t>>
+ layer_info_ GUARDED_BY(crit_);
+
+ // Where the current scalability structure is in the
+ // |scalability_structures_| array.
+ uint8_t current_ss_idx_;
+
+ // Holds received scalability structures.
+ std::array<GofInfoVP9, kMaxGofSaved> scalability_structures_
+ GUARDED_BY(crit_);
+
+ // Holds the picture id and the Gof information for a given TL0 picture index.
+ std::map<uint8_t,
+ std::pair<uint16_t, GofInfoVP9*>,
+ DescendingSeqNumComp<uint8_t>>
+ gof_info_ GUARDED_BY(crit_);
+
+ // Keep track of which picture id and which temporal layer that had the
+ // up switch flag set.
+ std::map<uint16_t, uint8_t> up_switch_ GUARDED_BY(crit_);
+
+ // For every temporal layer, keep a set of which frames that are missing.
+ std::array<std::set<uint16_t, DescendingSeqNumComp<uint16_t, kPicIdLength>>,
+ kMaxTemporalLayers>
+ missing_frames_for_layer_ GUARDED_BY(crit_);
+
+ OnCompleteFrameCallback* frame_callback_;
+};
+
+} // namespace video_coding
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc b/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
index 97d63e0fb54..d5fa9ae936c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
@@ -12,9 +12,11 @@
#include <stdio.h>
+#include <cstdlib>
#include <map>
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
@@ -227,7 +229,6 @@ class SsrcHandlers {
return -1;
}
- handler->rtp_module_->SetNACKStatus(kNackOff);
handler->rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset,
kDefaultTransmissionTimeOffsetExtensionId);
@@ -341,7 +342,7 @@ class RtpPlayerImpl : public RtpPlayerInterface {
assert(packet_source);
assert(packet_source->get());
packet_source_.swap(*packet_source);
- srand(321);
+ std::srand(321);
}
virtual ~RtpPlayerImpl() {}
@@ -434,7 +435,8 @@ class RtpPlayerImpl : public RtpPlayerInterface {
if (no_loss_startup_ > 0) {
no_loss_startup_--;
- } else if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate_) { // NOLINT
+ } else if ((std::rand() + 1.0) / (RAND_MAX + 1.0) <
+ loss_rate_) { // NOLINT
uint16_t seq_num = header.sequenceNumber;
lost_packets_.AddPacket(new RawRtpPacket(data, length, ssrc, seq_num));
DEBUG_LOG1("Dropped packet: %d!", header.header.sequenceNumber);
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h b/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
index 36b26db92e4..9eb957194f1 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
+++ b/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
@@ -12,6 +12,7 @@
#include <list>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/video_coding/packet.h"
#include "webrtc/modules/video_coding/test/test_util.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc b/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
index e774db16520..8c674ef5042 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
@@ -14,6 +14,7 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/video_coding/test/test_util.h"
#include "webrtc/system_wrappers/include/clock.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/timing.h b/chromium/third_party/webrtc/modules/video_coding/timing.h
index a45eee38c6c..e593c9acbc3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/timing.h
+++ b/chromium/third_party/webrtc/modules/video_coding/timing.h
@@ -28,7 +28,7 @@ class VCMTiming {
// The primary timing component should be passed
// if this is the dual timing component.
explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL);
- ~VCMTiming();
+ virtual ~VCMTiming();
// Resets the timing to the initial state.
void Reset();
@@ -69,11 +69,11 @@ class VCMTiming {
// Returns the receiver system time when the frame with timestamp
// frame_timestamp should be rendered, assuming that the system time currently
// is now_ms.
- int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
+ virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
// Returns the maximum time in ms that we can wait for a frame to become
// complete before we must pass it to the decoder.
- uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
+ virtual uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
// Returns the current target delay which is required delay + decode time +
// render delay.
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc b/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
index c95048c0743..5de7526ac24 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
@@ -73,6 +73,7 @@ void FrameDropper::Reset() {
incoming_frame_rate_ = kDefaultIncomingFrameRate;
large_frame_accumulation_count_ = 0;
+ large_frame_accumulation_chunk_size_ = 0;
large_frame_accumulation_spread_ = 0.5 * kDefaultIncomingFrameRate;
drop_next_ = false;
@@ -129,13 +130,6 @@ void FrameDropper::Fill(size_t framesize_bytes, bool delta_frame) {
// Change the level of the accumulator (bucket)
accumulator_ += framesize_kbits;
CapAccumulator();
- LOG(LS_VERBOSE) << "FILL acc " << accumulator_ << " max " << accumulator_max_
- << " count " << large_frame_accumulation_count_ << " chunk "
- << large_frame_accumulation_chunk_size_ << " spread "
- << large_frame_accumulation_spread_ << " delta avg "
- << delta_frame_size_avg_kbits_.filtered() << " SIZE "
- << framesize_kbits << "key frame ratio "
- << key_frame_ratio_.filtered();
}
void FrameDropper::Leak(uint32_t input_framerate) {
@@ -160,10 +154,6 @@ void FrameDropper::Leak(uint32_t input_framerate) {
if (accumulator_ < 0.0f) {
accumulator_ = 0.0f;
}
- LOG(LS_VERBOSE) << "LEAK acc " << accumulator_ << " max " << accumulator_max_
- << " count " << large_frame_accumulation_count_ << " spread "
- << large_frame_accumulation_spread_ << " delta avg "
- << delta_frame_size_avg_kbits_.filtered();
UpdateRatio();
}
@@ -201,8 +191,6 @@ bool FrameDropper::DropFrame() {
drop_next_ = false;
drop_count_ = 0;
}
- LOG(LS_VERBOSE) << " drop_ratio_ " << drop_ratio_.filtered()
- << " drop_count_ " << drop_count_;
if (drop_ratio_.filtered() >= 0.5f) { // Drops per keep
// limit is the number of frames we should drop between each kept frame
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc
new file mode 100644
index 00000000000..97f1da30e40
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+
+IvfFileWriter::IvfFileWriter(const std::string& file_name,
+ std::unique_ptr<FileWrapper> file,
+ VideoCodecType codec_type)
+ : codec_type_(codec_type),
+ num_frames_(0),
+ width_(0),
+ height_(0),
+ last_timestamp_(-1),
+ using_capture_timestamps_(false),
+ file_name_(file_name),
+ file_(std::move(file)) {}
+
+IvfFileWriter::~IvfFileWriter() {
+ Close();
+}
+
+const size_t kIvfHeaderSize = 32;
+
+std::unique_ptr<IvfFileWriter> IvfFileWriter::Open(const std::string& file_name,
+ VideoCodecType codec_type) {
+ std::unique_ptr<IvfFileWriter> file_writer;
+ std::unique_ptr<FileWrapper> file(FileWrapper::Create());
+ if (file->OpenFile(file_name.c_str(), false) != 0)
+ return file_writer;
+
+ file_writer.reset(new IvfFileWriter(
+ file_name, std::unique_ptr<FileWrapper>(std::move(file)), codec_type));
+ if (!file_writer->WriteHeader())
+ file_writer.reset();
+
+ return file_writer;
+}
+
+bool IvfFileWriter::WriteHeader() {
+ if (file_->Rewind() != 0) {
+ LOG(LS_WARNING) << "Unable to rewind output file " << file_name_;
+ return false;
+ }
+
+ uint8_t ivf_header[kIvfHeaderSize] = {0};
+ ivf_header[0] = 'D';
+ ivf_header[1] = 'K';
+ ivf_header[2] = 'I';
+ ivf_header[3] = 'F';
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[4], 0); // Version.
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[6], 32); // Header size.
+
+ switch (codec_type_) {
+ case kVideoCodecVP8:
+ ivf_header[8] = 'V';
+ ivf_header[9] = 'P';
+ ivf_header[10] = '8';
+ ivf_header[11] = '0';
+ break;
+ case kVideoCodecVP9:
+ ivf_header[8] = 'V';
+ ivf_header[9] = 'P';
+ ivf_header[10] = '9';
+ ivf_header[11] = '0';
+ break;
+ case kVideoCodecH264:
+ ivf_header[8] = 'H';
+ ivf_header[9] = '2';
+ ivf_header[10] = '6';
+ ivf_header[11] = '4';
+ break;
+ default:
+ LOG(LS_ERROR) << "Unknown CODEC type: " << codec_type_;
+ return false;
+ }
+
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[12], width_);
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[14], height_);
+ // Render timestamps are in ms (1/1000 scale), while RTP timestamps use a
+ // 90kHz clock.
+ ByteWriter<uint32_t>::WriteLittleEndian(
+ &ivf_header[16], using_capture_timestamps_ ? 1000 : 90000);
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[20], 1);
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[24],
+ static_cast<uint32_t>(num_frames_));
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[28], 0); // Reserved.
+
+ if (!file_->Write(ivf_header, kIvfHeaderSize)) {
+ LOG(LS_ERROR) << "Unable to write IVF header for file " << file_name_;
+ return false;
+ }
+
+ return true;
+}
+
+bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image) {
+ width_ = encoded_image._encodedWidth;
+ height_ = encoded_image._encodedHeight;
+ RTC_CHECK_GT(width_, 0);
+ RTC_CHECK_GT(height_, 0);
+ using_capture_timestamps_ = encoded_image._timeStamp == 0;
+
+ if (!WriteHeader())
+ return false;
+
+ std::string codec_name;
+ switch (codec_type_) {
+ case kVideoCodecVP8:
+ codec_name = "VP8";
+ break;
+ case kVideoCodecVP9:
+ codec_name = "VP9";
+ break;
+ case kVideoCodecH264:
+ codec_name = "H264";
+ break;
+ default:
+ codec_name = "Unknown";
+ }
+ LOG(LS_WARNING) << "Created IVF file " << file_name_
+ << " for codec data of type " << codec_name
+ << " at resolution " << width_ << " x " << height_
+ << ", using " << (using_capture_timestamps_ ? "1" : "90")
+ << "kHz clock resolution.";
+ return true;
+}
+
+bool IvfFileWriter::WriteFrame(const EncodedImage& encoded_image) {
+ RTC_DCHECK(file_->Open());
+
+ if (num_frames_ == 0 && !InitFromFirstFrame(encoded_image))
+ return false;
+
+ if ((encoded_image._encodedWidth > 0 || encoded_image._encodedHeight > 0) &&
+ (encoded_image._encodedHeight != height_ ||
+ encoded_image._encodedWidth != width_)) {
+ LOG(LS_WARNING)
+ << "Incomig frame has diffferent resolution then previous: (" << width_
+ << "x" << height_ << ") -> (" << encoded_image._encodedWidth << "x"
+ << encoded_image._encodedHeight << ")";
+ }
+
+ int64_t timestamp = using_capture_timestamps_
+ ? encoded_image.capture_time_ms_
+ : wrap_handler_.Unwrap(encoded_image._timeStamp);
+ if (last_timestamp_ != -1 && timestamp <= last_timestamp_) {
+ LOG(LS_WARNING) << "Timestamp no increasing: " << last_timestamp_ << " -> "
+ << timestamp;
+ }
+ last_timestamp_ = timestamp;
+
+ const size_t kFrameHeaderSize = 12;
+ uint8_t frame_header[kFrameHeaderSize] = {};
+ ByteWriter<uint32_t>::WriteLittleEndian(
+ &frame_header[0], static_cast<uint32_t>(encoded_image._length));
+ ByteWriter<uint64_t>::WriteLittleEndian(&frame_header[4], timestamp);
+ if (!file_->Write(frame_header, kFrameHeaderSize) ||
+ !file_->Write(encoded_image._buffer, encoded_image._length)) {
+ LOG(LS_ERROR) << "Unable to write frame to file " << file_name_;
+ return false;
+ }
+
+ ++num_frames_;
+ return true;
+}
+
+bool IvfFileWriter::Close() {
+ if (!file_->Open())
+ return false;
+
+ if (num_frames_ == 0) {
+ // No frame written to file, close and remove it entirely if possible.
+ file_->CloseFile();
+ if (remove(file_name_.c_str()) != 0)
+ LOG(LS_WARNING) << "Failed to remove empty IVF file " << file_name_;
+
+ return true;
+ }
+
+ return WriteHeader() && (file_->CloseFile() == 0);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h
new file mode 100644
index 00000000000..25d68a28034
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
+
+#include <memory>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/video_frame.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+
+namespace webrtc {
+
+class IvfFileWriter {
+ public:
+ ~IvfFileWriter();
+
+ static std::unique_ptr<IvfFileWriter> Open(const std::string& file_name,
+ VideoCodecType codec_type);
+ bool WriteFrame(const EncodedImage& encoded_image);
+ bool Close();
+
+ private:
+ IvfFileWriter(const std::string& path_name,
+ std::unique_ptr<FileWrapper> file,
+ VideoCodecType codec_type);
+ bool WriteHeader();
+ bool InitFromFirstFrame(const EncodedImage& encoded_image);
+
+ const VideoCodecType codec_type_;
+ size_t num_frames_;
+ uint16_t width_;
+ uint16_t height_;
+ int64_t last_timestamp_;
+ bool using_capture_timestamps_;
+ rtc::TimestampWrapAroundHandler wrap_handler_;
+ const std::string file_name_;
+ std::unique_ptr<FileWrapper> file_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileWriter);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc
new file mode 100644
index 00000000000..bdeef2abd5b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc
@@ -0,0 +1,176 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
+
+#include <memory>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+namespace {
+static const int kHeaderSize = 32;
+static const int kFrameHeaderSize = 12;
+static uint8_t dummy_payload[4] = {0, 1, 2, 3};
+} // namespace
+
+class IvfFileWriterTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ const int64_t start_id =
+ reinterpret_cast<int64_t>(this) ^ rtc::TimeMicros();
+ int64_t id = start_id;
+ do {
+ std::ostringstream oss;
+ oss << test::OutputPath() << "ivf_test_file_" << id++ << ".ivf";
+ file_name_ = oss.str();
+ } while (id < start_id + 100 && FileExists());
+ ASSERT_LT(id, start_id + 100);
+ }
+
+ bool WriteDummyTestFrames(int width,
+ int height,
+ int num_frames,
+ bool use_capture_tims_ms) {
+ EncodedImage frame;
+ frame._buffer = dummy_payload;
+ frame._encodedWidth = width;
+ frame._encodedHeight = height;
+ for (int i = 1; i <= num_frames; ++i) {
+ frame._length = i % sizeof(dummy_payload);
+ if (use_capture_tims_ms) {
+ frame.capture_time_ms_ = i;
+ } else {
+ frame._timeStamp = i;
+ }
+ if (!file_writer_->WriteFrame(frame))
+ return false;
+ }
+ return true;
+ }
+
+ void VerifyIvfHeader(FileWrapper* file,
+ const uint8_t fourcc[4],
+ int width,
+ int height,
+ uint32_t num_frames,
+ bool use_capture_tims_ms) {
+ uint8_t data[kHeaderSize];
+ ASSERT_EQ(kHeaderSize, file->Read(data, kHeaderSize));
+
+ uint8_t dkif[4] = {'D', 'K', 'I', 'F'};
+ EXPECT_EQ(0, memcmp(dkif, data, 4));
+ EXPECT_EQ(0u, ByteReader<uint16_t>::ReadLittleEndian(&data[4]));
+ EXPECT_EQ(32u, ByteReader<uint16_t>::ReadLittleEndian(&data[6]));
+ EXPECT_EQ(0, memcmp(fourcc, &data[8], 4));
+ EXPECT_EQ(width, ByteReader<uint16_t>::ReadLittleEndian(&data[12]));
+ EXPECT_EQ(height, ByteReader<uint16_t>::ReadLittleEndian(&data[14]));
+ EXPECT_EQ(use_capture_tims_ms ? 1000u : 90000u,
+ ByteReader<uint32_t>::ReadLittleEndian(&data[16]));
+ EXPECT_EQ(1u, ByteReader<uint32_t>::ReadLittleEndian(&data[20]));
+ EXPECT_EQ(num_frames, ByteReader<uint32_t>::ReadLittleEndian(&data[24]));
+ EXPECT_EQ(0u, ByteReader<uint32_t>::ReadLittleEndian(&data[28]));
+ }
+
+ void VerifyDummyTestFrames(FileWrapper* file, uint32_t num_frames) {
+ const int kMaxFrameSize = 4;
+ for (uint32_t i = 1; i <= num_frames; ++i) {
+ uint8_t frame_header[kFrameHeaderSize];
+ ASSERT_EQ(kFrameHeaderSize, file->Read(frame_header, kFrameHeaderSize));
+ uint32_t frame_length =
+ ByteReader<uint32_t>::ReadLittleEndian(&frame_header[0]);
+ EXPECT_EQ(i % 4, frame_length);
+ uint64_t timestamp =
+ ByteReader<uint64_t>::ReadLittleEndian(&frame_header[4]);
+ EXPECT_EQ(i, timestamp);
+
+ uint8_t data[kMaxFrameSize] = {};
+ ASSERT_EQ(frame_length,
+ static_cast<uint32_t>(file->Read(data, frame_length)));
+ EXPECT_EQ(0, memcmp(data, dummy_payload, frame_length));
+ }
+ }
+
+ void RunBasicFileStructureTest(VideoCodecType codec_type,
+ const uint8_t fourcc[4],
+ bool use_capture_tims_ms) {
+ file_writer_ = IvfFileWriter::Open(file_name_, codec_type);
+ ASSERT_TRUE(file_writer_.get());
+ const int kWidth = 320;
+ const int kHeight = 240;
+ const int kNumFrames = 257;
+ EXPECT_TRUE(
+ WriteDummyTestFrames(kWidth, kHeight, kNumFrames, use_capture_tims_ms));
+ EXPECT_TRUE(file_writer_->Close());
+
+ std::unique_ptr<FileWrapper> out_file(FileWrapper::Create());
+ ASSERT_EQ(0, out_file->OpenFile(file_name_.c_str(), true));
+ VerifyIvfHeader(out_file.get(), fourcc, kWidth, kHeight, kNumFrames,
+ use_capture_tims_ms);
+ VerifyDummyTestFrames(out_file.get(), kNumFrames);
+
+ EXPECT_EQ(0, out_file->CloseFile());
+ EXPECT_EQ(0, remove(file_name_.c_str()));
+ }
+
+ bool FileExists() {
+ std::unique_ptr<FileWrapper> file_wrapper(FileWrapper::Create());
+ return file_wrapper->OpenFile(file_name_.c_str(), true) == 0;
+ }
+
+ std::string file_name_;
+ std::unique_ptr<IvfFileWriter> file_writer_;
+};
+
+TEST_F(IvfFileWriterTest, RemovesUnusedFile) {
+ file_writer_ = IvfFileWriter::Open(file_name_, kVideoCodecVP8);
+ ASSERT_TRUE(file_writer_.get() != nullptr);
+ EXPECT_TRUE(FileExists());
+ EXPECT_TRUE(file_writer_->Close());
+ EXPECT_FALSE(FileExists());
+ EXPECT_FALSE(file_writer_->Close()); // Can't close twice.
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP8FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP8, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP8FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP8, fourcc, true);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP9FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP9, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP9FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP9, fourcc, true);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicH264FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'H', '2', '6', '4'};
+ RunBasicFileStructureTest(kVideoCodecH264, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicH264FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'H', '2', '6', '4'};
+ RunBasicFileStructureTest(kVideoCodecH264, fourcc, true);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
index c6e56697310..bb60ee036e3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
@@ -11,29 +11,39 @@
namespace webrtc {
+namespace {
static const int kMinFps = 5;
-static const int kMeasureSecondsDownscale = 3;
// Threshold constant used until first downscale (to permit fast rampup).
static const int kMeasureSecondsFastUpscale = 2;
static const int kMeasureSecondsUpscale = 5;
+static const int kMeasureSecondsDownscale = 5;
static const int kFramedropPercentThreshold = 60;
-static const int kHdResolutionThreshold = 700 * 500;
-static const int kHdBitrateThresholdKbps = 500;
-
-const int QualityScaler::kDefaultLowQpDenominator = 3;
-// Note that this is the same for width and height to permit 120x90 in both
-// portrait and landscape mode.
-const int QualityScaler::kDefaultMinDownscaleDimension = 90;
-
-QualityScaler::QualityScaler()
- : low_qp_threshold_(-1),
- framerate_down_(false),
- min_width_(kDefaultMinDownscaleDimension),
- min_height_(kDefaultMinDownscaleDimension) {}
+// Min width/height to downscale to, set to not go below QVGA, but with some
+// margin to permit "almost-QVGA" resolutions, such as QCIF.
+static const int kMinDownscaleDimension = 140;
+// Initial resolutions corresponding to a bitrate. Aa bit above their actual
+// values to permit near-VGA and near-QVGA resolutions to use the same
+// mechanism.
+static const int kVgaBitrateThresholdKbps = 500;
+static const int kVgaNumPixels = 700 * 500; // 640x480
+static const int kQvgaBitrateThresholdKbps = 250;
+static const int kQvgaNumPixels = 400 * 300; // 320x240
+} // namespace
+
+// QP thresholds are chosen to be high enough to be hit in practice when quality
+// is good, but also low enough to not cause a flip-flop behavior (e.g. going up
+// in resolution shouldn't give so bad quality that we should go back down).
+
+const int QualityScaler::kLowVp8QpThreshold = 29;
+const int QualityScaler::kBadVp8QpThreshold = 95;
+
+const int QualityScaler::kLowH264QpThreshold = 22;
+const int QualityScaler::kBadH264QpThreshold = 35;
+
+QualityScaler::QualityScaler() : low_qp_threshold_(-1) {}
void QualityScaler::Init(int low_qp_threshold,
int high_qp_threshold,
- bool use_framerate_reduction,
int initial_bitrate_kbps,
int width,
int height,
@@ -41,7 +51,6 @@ void QualityScaler::Init(int low_qp_threshold,
ClearSamples();
low_qp_threshold_ = low_qp_threshold;
high_qp_threshold_ = high_qp_threshold;
- use_framerate_reduction_ = use_framerate_reduction;
downscale_shift_ = 0;
// Use a faster window for upscaling initially (but be more graceful later).
// This enables faster initial rampups without risking strong up-down
@@ -49,25 +58,24 @@ void QualityScaler::Init(int low_qp_threshold,
measure_seconds_upscale_ = kMeasureSecondsFastUpscale;
const int init_width = width;
const int init_height = height;
- // TODO(glaznev): Investigate using thresholds for other resolutions
- // or threshold tables.
- if (initial_bitrate_kbps > 0 &&
- initial_bitrate_kbps < kHdBitrateThresholdKbps) {
- // Start scaling to roughly VGA.
- while (width * height > kHdResolutionThreshold) {
+ if (initial_bitrate_kbps > 0) {
+ int init_num_pixels = width * height;
+ if (initial_bitrate_kbps < kVgaBitrateThresholdKbps)
+ init_num_pixels = kVgaNumPixels;
+ if (initial_bitrate_kbps < kQvgaBitrateThresholdKbps)
+ init_num_pixels = kQvgaNumPixels;
+ while (width * height > init_num_pixels) {
++downscale_shift_;
width /= 2;
height /= 2;
}
}
+
+ // Zero out width/height so they can be checked against inside
+ // UpdateTargetResolution.
+ res_.width = res_.height = 0;
UpdateTargetResolution(init_width, init_height);
ReportFramerate(fps);
- target_framerate_ = -1;
-}
-
-void QualityScaler::SetMinResolution(int min_width, int min_height) {
- min_width_ = min_width;
- min_height_ = min_height;
}
// Report framerate(fps) to estimate # of samples.
@@ -96,34 +104,14 @@ void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
int avg_drop = 0;
int avg_qp = 0;
- // When encoder consistently overshoots, framerate reduction and spatial
- // resizing will be triggered to get a smoother video.
if ((framedrop_percent_.GetAverage(num_samples_downscale_, &avg_drop) &&
avg_drop >= kFramedropPercentThreshold) ||
(average_qp_downscale_.GetAverage(num_samples_downscale_, &avg_qp) &&
avg_qp > high_qp_threshold_)) {
- // Reducing frame rate before spatial resolution change.
- // Reduce frame rate only when it is above a certain number.
- // Only one reduction is allowed for now.
- // TODO(jackychen): Allow more than one framerate reduction.
- if (use_framerate_reduction_ && !framerate_down_ && framerate_ >= 20) {
- target_framerate_ = framerate_ / 2;
- framerate_down_ = true;
- // If frame rate has been updated, clear the buffer. We don't want
- // spatial resolution to change right after frame rate change.
- ClearSamples();
- } else {
- AdjustScale(false);
- }
+ AdjustScale(false);
} else if (average_qp_upscale_.GetAverage(num_samples_upscale_, &avg_qp) &&
avg_qp <= low_qp_threshold_) {
- if (use_framerate_reduction_ && framerate_down_) {
- target_framerate_ = -1;
- framerate_down_ = false;
- ClearSamples();
- } else {
- AdjustScale(true);
- }
+ AdjustScale(true);
}
UpdateTargetResolution(frame.width(), frame.height());
}
@@ -132,10 +120,6 @@ QualityScaler::Resolution QualityScaler::GetScaledResolution() const {
return res_;
}
-int QualityScaler::GetTargetFramerate() const {
- return target_framerate_;
-}
-
const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
Resolution res = GetScaledResolution();
if (res.width == frame.width())
@@ -146,24 +130,39 @@ const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
if (scaler_.Scale(frame, &scaled_frame_) != 0)
return frame;
+ // TODO(perkj): Refactor the scaler to not own |scaled_frame|. VideoFrame are
+ // just thin wrappers so instead the scaler should return a
+ // rtc::scoped_refptr<VideoFrameBuffer> and a new VideoFrame be created with
+ // the meta data from |frame|. That way we would not have to set all these
+ // meta data.
scaled_frame_.set_ntp_time_ms(frame.ntp_time_ms());
scaled_frame_.set_timestamp(frame.timestamp());
scaled_frame_.set_render_time_ms(frame.render_time_ms());
+ scaled_frame_.set_rotation(frame.rotation());
return scaled_frame_;
}
void QualityScaler::UpdateTargetResolution(int frame_width, int frame_height) {
assert(downscale_shift_ >= 0);
- res_.width = frame_width;
- res_.height = frame_height;
+ int shifts_performed = 0;
for (int shift = downscale_shift_;
- shift > 0 && (res_.width / 2 >= min_width_) &&
- (res_.height / 2 >= min_height_);
- --shift) {
- res_.width /= 2;
- res_.height /= 2;
+ shift > 0 && (frame_width / 2 >= kMinDownscaleDimension) &&
+ (frame_height / 2 >= kMinDownscaleDimension);
+ --shift, ++shifts_performed) {
+ frame_width /= 2;
+ frame_height /= 2;
+ }
+ // Clamp to number of shifts actually performed to not be stuck trying to
+ // scale way beyond QVGA.
+ downscale_shift_ = shifts_performed;
+ if (res_.width == frame_width && res_.height == frame_height) {
+ // No reset done/needed, using same resolution.
+ return;
}
+ res_.width = frame_width;
+ res_.height = frame_height;
+ ClearSamples();
}
void QualityScaler::ClearSamples() {
@@ -184,11 +183,10 @@ void QualityScaler::AdjustScale(bool up) {
if (downscale_shift_ < 0)
downscale_shift_ = 0;
if (!up) {
- // Hit first downscale, start using a slower threshold for going up.
+ // First downscale hit, start using a slower threshold for going up.
measure_seconds_upscale_ = kMeasureSecondsUpscale;
UpdateSampleCounts();
}
- ClearSamples();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
index 34dda0e9f37..fe70393c21c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
@@ -17,8 +17,6 @@
namespace webrtc {
class QualityScaler {
public:
- static const int kDefaultLowQpDenominator;
- static const int kDefaultMinDownscaleDimension;
struct Resolution {
int width;
int height;
@@ -27,22 +25,27 @@ class QualityScaler {
QualityScaler();
void Init(int low_qp_threshold,
int high_qp_threshold,
- bool use_framerate_reduction,
int initial_bitrate_kbps,
int width,
int height,
int fps);
- void SetMinResolution(int min_width, int min_height);
void ReportFramerate(int framerate);
void ReportQP(int qp);
void ReportDroppedFrame();
- void Reset(int framerate, int bitrate, int width, int height);
void OnEncodeFrame(const VideoFrame& frame);
Resolution GetScaledResolution() const;
const VideoFrame& GetScaledFrame(const VideoFrame& frame);
- int GetTargetFramerate() const;
int downscale_shift() const { return downscale_shift_; }
+ // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+ // bitstream range of [0, 127] and not the user-level range of [0,63].
+ static const int kLowVp8QpThreshold;
+ static const int kBadVp8QpThreshold;
+
+ // H264 QP is in the range [0, 51].
+ static const int kLowH264QpThreshold;
+ static const int kBadH264QpThreshold;
+
private:
void AdjustScale(bool up);
void UpdateTargetResolution(int frame_width, int frame_height);
@@ -59,17 +62,12 @@ class QualityScaler {
MovingAverage<int> average_qp_downscale_;
int framerate_;
- int target_framerate_;
int low_qp_threshold_;
int high_qp_threshold_;
MovingAverage<int> framedrop_percent_;
Resolution res_;
int downscale_shift_;
- int framerate_down_;
- bool use_framerate_reduction_;
- int min_width_;
- int min_height_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
index 72e9db405ed..fdec081c900 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
@@ -16,33 +16,24 @@ namespace webrtc {
namespace {
static const int kNumSeconds = 10;
static const int kWidth = 1920;
-static const int kWidthVga = 640;
static const int kHalfWidth = kWidth / 2;
static const int kHeight = 1080;
-static const int kHeightVga = 480;
static const int kFramerate = 30;
static const int kLowQp = 15;
static const int kNormalQp = 30;
+static const int kLowQpThreshold = 18;
static const int kHighQp = 40;
-static const int kMaxQp = 56;
-static const int kDisabledBadQpThreshold = kMaxQp + 1;
+static const int kDisabledBadQpThreshold = 64;
static const int kLowInitialBitrateKbps = 300;
// These values need to be in sync with corresponding constants
// in quality_scaler.cc
-static const int kMeasureSecondsDownscale = 3;
static const int kMeasureSecondsFastUpscale = 2;
static const int kMeasureSecondsUpscale = 5;
+static const int kMeasureSecondsDownscale = 5;
+static const int kMinDownscaleDimension = 140;
} // namespace
class QualityScalerTest : public ::testing::Test {
- public:
- // Temporal and spatial resolution.
- struct Resolution {
- int framerate;
- int width;
- int height;
- };
-
protected:
enum ScaleDirection {
kKeepScaleAtHighQp,
@@ -50,13 +41,11 @@ class QualityScalerTest : public ::testing::Test {
kScaleDownAboveHighQp,
kScaleUp
};
- enum BadQualityMetric { kDropFrame, kReportLowQP };
QualityScalerTest() {
input_frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, kHalfWidth,
kHalfWidth);
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false,
- 0, 0, 0, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, 0, 0, 0, kFramerate);
qs_.OnEncodeFrame(input_frame_);
}
@@ -103,16 +92,6 @@ class QualityScalerTest : public ::testing::Test {
void DoesNotDownscaleFrameDimensions(int width, int height);
- Resolution TriggerResolutionChange(BadQualityMetric dropframe_lowqp,
- int num_second,
- int initial_framerate);
-
- void VerifyQualityAdaptation(int initial_framerate,
- int seconds_downscale,
- int seconds_upscale,
- bool expect_spatial_resize,
- bool expect_framerate_reduction);
-
void DownscaleEndsAt(int input_width,
int input_height,
int end_width,
@@ -200,7 +179,7 @@ void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
int min_dimension = initial_min_dimension;
int current_shift = 0;
// Drop all frames to force-trigger downscaling.
- while (min_dimension >= 2 * QualityScaler::kDefaultMinDownscaleDimension) {
+ while (min_dimension >= 2 * kMinDownscaleDimension) {
EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within "
<< kNumSeconds << " seconds.";
qs_.OnEncodeFrame(input_frame_);
@@ -270,133 +249,50 @@ TEST_F(QualityScalerTest, DoesNotDownscaleFrom1Px) {
DoesNotDownscaleFrameDimensions(1, 1);
}
-QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
- BadQualityMetric dropframe_lowqp,
- int num_second,
- int initial_framerate) {
- QualityScalerTest::Resolution res;
- res.framerate = initial_framerate;
- qs_.OnEncodeFrame(input_frame_);
- res.width = qs_.GetScaledResolution().width;
- res.height = qs_.GetScaledResolution().height;
- for (int i = 0; i < kFramerate * num_second; ++i) {
- switch (dropframe_lowqp) {
- case kReportLowQP:
- qs_.ReportQP(kLowQp);
- break;
- case kDropFrame:
- qs_.ReportDroppedFrame();
- break;
- }
- qs_.OnEncodeFrame(input_frame_);
- // Simulate the case when SetRates is called right after reducing
- // framerate.
- qs_.ReportFramerate(initial_framerate);
- res.framerate = qs_.GetTargetFramerate();
- if (res.framerate != -1)
- qs_.ReportFramerate(res.framerate);
- res.width = qs_.GetScaledResolution().width;
- res.height = qs_.GetScaledResolution().height;
- }
- return res;
-}
-
-void QualityScalerTest::VerifyQualityAdaptation(
- int initial_framerate,
- int seconds_downscale,
- int seconds_upscale,
- bool expect_spatial_resize,
- bool expect_framerate_reduction) {
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, true, 0, 0, 0, initial_framerate);
- qs_.OnEncodeFrame(input_frame_);
- int init_width = qs_.GetScaledResolution().width;
- int init_height = qs_.GetScaledResolution().height;
-
- // Test reducing framerate by dropping frame continuously.
- QualityScalerTest::Resolution res =
- TriggerResolutionChange(kDropFrame, seconds_downscale, initial_framerate);
-
- if (expect_framerate_reduction) {
- EXPECT_LT(res.framerate, initial_framerate);
- } else {
- // No framerate reduction, video decimator should be disabled.
- EXPECT_EQ(-1, res.framerate);
- }
-
- if (expect_spatial_resize) {
- EXPECT_LT(res.width, init_width);
- EXPECT_LT(res.height, init_height);
- } else {
- EXPECT_EQ(init_width, res.width);
- EXPECT_EQ(init_height, res.height);
- }
-
- // The "seconds * 1.5" is to ensure spatial resolution to recover.
- // For example, in 6 seconds test, framerate reduction happens in the first
- // 3 seconds from 30fps to 15fps and causes the buffer size to be half of the
- // original one. Then it will take only 45 samples to downscale (twice in 90
- // samples). So to recover the resolution changes, we need more than 10
- // seconds (i.e, seconds_upscale * 1.5). This is because the framerate
- // increases before spatial size recovers, so it will take 150 samples to
- // recover spatial size (300 for twice).
- res = TriggerResolutionChange(kReportLowQP, seconds_upscale * 1.5,
- initial_framerate);
- EXPECT_EQ(-1, res.framerate);
- EXPECT_EQ(init_width, res.width);
- EXPECT_EQ(init_height, res.height);
-}
-
-// In 3 seconds test, only framerate adjusting should happen and 5 second
-// upscaling duration, only a framerate adjusting should happen.
-TEST_F(QualityScalerTest, ChangeFramerateOnly) {
- VerifyQualityAdaptation(kFramerate, kMeasureSecondsDownscale,
- kMeasureSecondsUpscale, false, true);
-}
-
-// In 6 seconds test, framerate adjusting and scaling are both
-// triggered, it shows that scaling would happen after framerate
-// adjusting.
-TEST_F(QualityScalerTest, ChangeFramerateAndSpatialSize) {
- VerifyQualityAdaptation(kFramerate, kMeasureSecondsDownscale * 2,
- kMeasureSecondsUpscale * 2, true, true);
-}
-
-// When starting from a low framerate, only spatial size will be changed.
-TEST_F(QualityScalerTest, ChangeSpatialSizeOnly) {
- qs_.ReportFramerate(kFramerate >> 1);
- VerifyQualityAdaptation(kFramerate >> 1, kMeasureSecondsDownscale * 2,
- kMeasureSecondsUpscale * 2, true, false);
-}
-
TEST_F(QualityScalerTest, DoesNotDownscaleBelow2xDefaultMinDimensionsWidth) {
DoesNotDownscaleFrameDimensions(
- 2 * QualityScaler::kDefaultMinDownscaleDimension - 1, 1000);
+ 2 * kMinDownscaleDimension - 1, 1000);
}
TEST_F(QualityScalerTest, DoesNotDownscaleBelow2xDefaultMinDimensionsHeight) {
DoesNotDownscaleFrameDimensions(
- 1000, 2 * QualityScaler::kDefaultMinDownscaleDimension - 1);
+ 1000, 2 * kMinDownscaleDimension - 1);
}
TEST_F(QualityScalerTest, DownscaleToVgaOnLowInitialBitrate) {
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, true,
- kLowInitialBitrateKbps, kWidth, kHeight, kFramerate);
+ static const int kWidth720p = 1280;
+ static const int kHeight720p = 720;
+ static const int kInitialBitrateKbps = 300;
+ input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
+ kWidth720p / 2, kWidth720p / 2);
+ qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
+ kWidth720p, kHeight720p, kFramerate);
qs_.OnEncodeFrame(input_frame_);
int init_width = qs_.GetScaledResolution().width;
int init_height = qs_.GetScaledResolution().height;
- EXPECT_LE(init_width, kWidthVga);
- EXPECT_LE(init_height, kHeightVga);
+ EXPECT_EQ(640, init_width);
+ EXPECT_EQ(360, init_height);
+}
+
+TEST_F(QualityScalerTest, DownscaleToQvgaOnLowerInitialBitrate) {
+ static const int kWidth720p = 1280;
+ static const int kHeight720p = 720;
+ static const int kInitialBitrateKbps = 200;
+ input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
+ kWidth720p / 2, kWidth720p / 2);
+ qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
+ kWidth720p, kHeight720p, kFramerate);
+ qs_.OnEncodeFrame(input_frame_);
+ int init_width = qs_.GetScaledResolution().width;
+ int init_height = qs_.GetScaledResolution().height;
+ EXPECT_EQ(320, init_width);
+ EXPECT_EQ(180, init_height);
}
TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
- QualityScalerTest::Resolution initial_res;
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false, 0,
- kWidth, kHeight, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, 0, kWidth, kHeight, kFramerate);
qs_.OnEncodeFrame(input_frame_);
- initial_res.width = qs_.GetScaledResolution().width;
- initial_res.height = qs_.GetScaledResolution().height;
+ QualityScaler::Resolution initial_res = qs_.GetScaledResolution();
// Should not downscale if less than kMeasureSecondsDownscale seconds passed.
for (int i = 0; i < kFramerate * kMeasureSecondsDownscale - 1; ++i) {
@@ -431,12 +327,10 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
}
TEST_F(QualityScalerTest, UpscaleQuicklyInitiallyAfterMeasuredSeconds) {
- QualityScalerTest::Resolution initial_res;
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false,
- kLowInitialBitrateKbps, kWidth, kHeight, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, kLowInitialBitrateKbps, kWidth, kHeight,
+ kFramerate);
qs_.OnEncodeFrame(input_frame_);
- initial_res.width = qs_.GetScaledResolution().width;
- initial_res.height = qs_.GetScaledResolution().height;
+ QualityScaler::Resolution initial_res = qs_.GetScaledResolution();
// Should not upscale if less than kMeasureSecondsFastUpscale seconds passed.
for (int i = 0; i < kFramerate * kMeasureSecondsFastUpscale - 1; ++i) {
@@ -480,36 +374,20 @@ void QualityScalerTest::DownscaleEndsAt(int input_width,
}
}
-TEST_F(QualityScalerTest, DefaultDownscalesTo160x90) {
- DownscaleEndsAt(320, 180, 160, 90);
-}
-
-TEST_F(QualityScalerTest, DefaultDownscalesTo90x160) {
- DownscaleEndsAt(180, 320, 90, 160);
-}
-
-TEST_F(QualityScalerTest, DefaultDownscalesFrom1280x720To160x90) {
- DownscaleEndsAt(1280, 720, 160, 90);
-}
-
-TEST_F(QualityScalerTest, DefaultDoesntDownscaleBelow160x90) {
- DownscaleEndsAt(320 - 1, 180 - 1, 320 - 1, 180 - 1);
+TEST_F(QualityScalerTest, DownscalesTo320x180) {
+ DownscaleEndsAt(640, 360, 320, 180);
}
-TEST_F(QualityScalerTest, DefaultDoesntDownscaleBelow90x160) {
- DownscaleEndsAt(180 - 1, 320 - 1, 180 - 1, 320 - 1);
+TEST_F(QualityScalerTest, DownscalesTo180x320) {
+ DownscaleEndsAt(360, 640, 180, 320);
}
-TEST_F(QualityScalerTest, RespectsMinResolutionWidth) {
- // Should end at 200x100, as width can't go lower.
- qs_.SetMinResolution(200, 10);
- DownscaleEndsAt(1600, 800, 200, 100);
+TEST_F(QualityScalerTest, DownscalesFrom1280x720To320x180) {
+ DownscaleEndsAt(1280, 720, 320, 180);
}
-TEST_F(QualityScalerTest, RespectsMinResolutionHeight) {
- // Should end at 100x200, as height can't go lower.
- qs_.SetMinResolution(10, 200);
- DownscaleEndsAt(800, 1600, 100, 200);
+TEST_F(QualityScalerTest, DoesntDownscaleInitialQvga) {
+ DownscaleEndsAt(320, 180, 320, 180);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp b/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
index 42cbb3d4e03..8edfd619863 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
@@ -20,6 +20,8 @@
'sources': [
'frame_dropper.cc',
'frame_dropper.h',
+ 'ivf_file_writer.cc',
+ 'ivf_file_writer.h',
'moving_average.h',
'qp_parser.cc',
'qp_parser.h',
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc b/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
index 631385d0f25..d88fb6cc21e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
@@ -74,8 +74,9 @@ static int VP8GetBit(VP8BitReader* const br, int prob) {
uint8_t range = br->range_;
if (br->bits_ < 0) {
VP8LoadNewBytes(br);
+ if (br->eof_)
+ return 0;
}
-
const int pos = br->bits_;
const uint8_t split = (range * prob) >> 8;
const uint8_t value = static_cast<uint8_t>(br->value_ >> pos);
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi b/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
index 7cfefed3ee4..27454a47115 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
@@ -22,19 +22,19 @@
],
'sources': [
# interfaces
- 'include/bitrate_adjuster.h',
'include/video_coding.h',
'include/video_coding_defines.h',
# headers
'codec_database.h',
'codec_timer.h',
- 'content_metrics_processing.h',
'decoding_state.h',
'encoded_frame.h',
'fec_tables_xor.h',
'frame_buffer.h',
+ 'frame_buffer2.h',
'frame_object.h',
+ 'rtp_frame_reference_finder.h',
'generic_decoder.h',
'generic_encoder.h',
'histogram.h',
@@ -50,8 +50,6 @@
'packet.h',
'packet_buffer.h',
'percentile_filter.h',
- 'qm_select_data.h',
- 'qm_select.h',
'receiver.h',
'rtt_filter.h',
'session_info.h',
@@ -60,14 +58,14 @@
'video_coding_impl.h',
# sources
- 'bitrate_adjuster.cc',
'codec_database.cc',
'codec_timer.cc',
- 'content_metrics_processing.cc',
'decoding_state.cc',
'encoded_frame.cc',
'frame_buffer.cc',
+ 'frame_buffer2.cc',
'frame_object.cc',
+ 'rtp_frame_reference_finder.cc',
'generic_decoder.cc',
'generic_encoder.cc',
'inter_frame_delay.cc',
@@ -80,7 +78,6 @@
'packet.cc',
'packet_buffer.cc',
'percentile_filter.cc',
- 'qm_select.cc',
'receiver.cc',
'rtt_filter.cc',
'session_info.cc',
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
index e5f0ee12222..72bcc9a0594 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
@@ -14,6 +14,7 @@
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/encoded_frame.h"
#include "webrtc/modules/video_coding/jitter_buffer.h"
@@ -53,7 +54,6 @@ class EncodedImageCallbackWrapper : public EncodedImageCallback {
callback_ = callback;
}
- // TODO(andresp): Change to void as return value is ignored.
virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
@@ -73,23 +73,19 @@ class VideoCodingModuleImpl : public VideoCodingModule {
public:
VideoCodingModuleImpl(Clock* clock,
EventFactory* event_factory,
- bool owns_event_factory,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender)
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback)
: VideoCodingModule(),
- sender_(clock,
- &post_encode_callback_,
- encoder_rate_observer,
- qm_settings_callback),
+ sender_(clock, &post_encode_callback_, encoder_rate_observer, nullptr),
receiver_(clock,
event_factory,
+ pre_decode_image_callback,
nack_sender,
- keyframe_request_sender),
- own_event_factory_(owns_event_factory ? event_factory : NULL) {}
+ keyframe_request_sender) {}
- virtual ~VideoCodingModuleImpl() { own_event_factory_.reset(); }
+ virtual ~VideoCodingModuleImpl() {}
int64_t TimeUntilNextProcess() override {
int64_t sender_time = sender_.TimeUntilNextProcess();
@@ -132,16 +128,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return sender_.SetChannelParameters(target_bitrate, lossRate, rtt);
}
- int32_t RegisterTransportCallback(
- VCMPacketizationCallback* transport) override {
- return sender_.RegisterTransportCallback(transport);
- }
-
- int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) override {
- return sender_.RegisterSendStatisticsCallback(sendStats);
- }
-
int32_t RegisterProtectionCallback(
VCMProtectionCallback* protection) override {
return sender_.RegisterProtectionCallback(protection);
@@ -156,12 +142,11 @@ class VideoCodingModuleImpl : public VideoCodingModule {
}
int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) override {
- return sender_.AddVideoFrame(videoFrame, contentMetrics, codecSpecificInfo);
+ return sender_.AddVideoFrame(videoFrame, codecSpecificInfo);
}
- int32_t IntraFrameRequest(int stream_index) override {
+ int32_t IntraFrameRequest(size_t stream_index) override {
return sender_.IntraFrameRequest(stream_index);
}
@@ -212,11 +197,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return receiver_.RegisterPacketRequestCallback(callback);
}
- int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) override {
- return receiver_.RegisterRenderBufferSizeCallback(callback);
- }
-
int32_t Decode(uint16_t maxWaitTimeMs) override {
return receiver_.Decode(maxWaitTimeMs);
}
@@ -273,10 +253,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return receiver_.SetReceiveChannelParameters(rtt);
}
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer) override {
- receiver_.RegisterPreDecodeImageCallback(observer);
- }
-
void RegisterPostEncodeImageCallback(
EncodedImageCallback* observer) override {
post_encode_callback_.Register(observer);
@@ -288,7 +264,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
EncodedImageCallbackWrapper post_encode_callback_;
vcm::VideoSender sender_;
vcm::VideoReceiver receiver_;
- std::unique_ptr<EventFactory> own_event_factory_;
};
} // namespace
@@ -305,7 +280,8 @@ VideoCodingModule* VideoCodingModule::Create(
return VideoCodingModule::Create(clock, encoder_rate_observer,
qm_settings_callback,
nullptr, // NackSender
- nullptr); // KeyframeRequestSender
+ nullptr, // KeyframeRequestSender
+ nullptr); // Pre-decode image callback
}
// Create method for the new jitter buffer.
@@ -314,11 +290,11 @@ VideoCodingModule* VideoCodingModule::Create(
VideoEncoderRateObserver* encoder_rate_observer,
VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender) {
- return new VideoCodingModuleImpl(clock, new EventFactoryImpl, true,
- encoder_rate_observer, qm_settings_callback,
- nack_sender,
- keyframe_request_sender);
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback) {
+ return new VideoCodingModuleImpl(clock, nullptr, encoder_rate_observer,
+ nack_sender, keyframe_request_sender,
+ pre_decode_image_callback);
}
// Create method for current interface, will be removed when the
@@ -338,9 +314,8 @@ VideoCodingModule* VideoCodingModule::Create(
KeyFrameRequestSender* keyframe_request_sender) {
assert(clock);
assert(event_factory);
- return new VideoCodingModuleImpl(clock, event_factory, false, nullptr,
- nullptr, nack_sender,
- keyframe_request_sender);
+ return new VideoCodingModuleImpl(clock, event_factory, nullptr, nack_sender,
+ keyframe_request_sender, nullptr);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
index f5f9b00206c..c9992b7f9ce 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
@@ -14,10 +14,13 @@
#include "webrtc/modules/video_coding/include/video_coding.h"
#include <memory>
+#include <string>
#include <vector>
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/video_coding/codec_database.h"
#include "webrtc/modules/video_coding/frame_buffer.h"
#include "webrtc/modules/video_coding/generic_decoder.h"
@@ -28,7 +31,6 @@
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/modules/video_coding/utility/qp_parser.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -50,14 +52,14 @@ class VCMProcessTimer {
int64_t _latestMs;
};
-class VideoSender {
+class VideoSender : public Module {
public:
typedef VideoCodingModule::SenderNackMode SenderNackMode;
VideoSender(Clock* clock,
EncodedImageCallback* post_encode_callback,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback);
+ VCMSendStatisticsCallback* send_stats_callback);
~VideoSender();
@@ -78,23 +80,20 @@ class VideoSender {
uint8_t lossRate,
int64_t rtt);
- int32_t RegisterTransportCallback(VCMPacketizationCallback* transport);
- int32_t RegisterSendStatisticsCallback(VCMSendStatisticsCallback* sendStats);
int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
void SetVideoProtection(VCMVideoProtection videoProtection);
int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* _contentMetrics,
const CodecSpecificInfo* codecSpecificInfo);
- int32_t IntraFrameRequest(int stream_index);
+ int32_t IntraFrameRequest(size_t stream_index);
int32_t EnableFrameDropper(bool enable);
void SuspendBelowMinBitrate();
bool VideoSuspended() const;
- int64_t TimeUntilNextProcess();
- void Process();
+ int64_t TimeUntilNextProcess() override;
+ void Process() override;
private:
void SetEncoderParameters(EncoderParameters params)
@@ -102,12 +101,11 @@ class VideoSender {
Clock* const clock_;
- std::unique_ptr<CriticalSectionWrapper> process_crit_sect_;
rtc::CriticalSection encoder_crit_;
VCMGenericEncoder* _encoder;
- VCMEncodedFrameCallback _encodedFrameCallback GUARDED_BY(encoder_crit_);
media_optimization::MediaOptimization _mediaOpt;
- VCMSendStatisticsCallback* _sendStatsCallback GUARDED_BY(process_crit_sect_);
+ VCMEncodedFrameCallback _encodedFrameCallback GUARDED_BY(encoder_crit_);
+ VCMSendStatisticsCallback* const send_stats_callback_;
VCMCodecDataBase _codecDataBase GUARDED_BY(encoder_crit_);
bool frame_dropper_enabled_ GUARDED_BY(encoder_crit_);
VCMProcessTimer _sendStatsTimer;
@@ -116,21 +114,22 @@ class VideoSender {
VideoCodec current_codec_;
rtc::ThreadChecker main_thread_;
- VCMQMSettingsCallback* const qm_settings_callback_;
VCMProtectionCallback* protection_callback_;
rtc::CriticalSection params_crit_;
EncoderParameters encoder_params_ GUARDED_BY(params_crit_);
bool encoder_has_internal_source_ GUARDED_BY(params_crit_);
+ std::string encoder_name_ GUARDED_BY(params_crit_);
std::vector<FrameType> next_frame_types_ GUARDED_BY(params_crit_);
};
-class VideoReceiver {
+class VideoReceiver : public Module {
public:
typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
VideoReceiver(Clock* clock,
EventFactory* event_factory,
+ EncodedImageCallback* pre_decode_image_callback,
NackSender* nack_sender = nullptr,
KeyFrameRequestSender* keyframe_request_sender = nullptr);
~VideoReceiver();
@@ -148,7 +147,6 @@ class VideoReceiver {
VCMDecoderTimingCallback* decoderTiming);
int32_t RegisterFrameTypeCallback(VCMFrameTypeCallback* frameTypeCallback);
int32_t RegisterPacketRequestCallback(VCMPacketRequestCallback* callback);
- int RegisterRenderBufferSizeCallback(VCMRenderBufferSizeCallback* callback);
int32_t Decode(uint16_t maxWaitTimeMs);
@@ -175,50 +173,43 @@ class VideoReceiver {
int32_t SetReceiveChannelParameters(int64_t rtt);
int32_t SetVideoProtection(VCMVideoProtection videoProtection, bool enable);
- int64_t TimeUntilNextProcess();
- void Process();
+ int64_t TimeUntilNextProcess() override;
+ void Process() override;
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer);
void TriggerDecoderShutdown();
protected:
int32_t Decode(const webrtc::VCMEncodedFrame& frame)
- EXCLUSIVE_LOCKS_REQUIRED(_receiveCritSect);
+ EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
int32_t RequestKeyFrame();
int32_t RequestSliceLossIndication(const uint64_t pictureID) const;
private:
Clock* const clock_;
- std::unique_ptr<CriticalSectionWrapper> process_crit_sect_;
- CriticalSectionWrapper* _receiveCritSect;
+ rtc::CriticalSection process_crit_;
+ rtc::CriticalSection receive_crit_;
VCMTiming _timing;
VCMReceiver _receiver;
VCMDecodedFrameCallback _decodedFrameCallback;
- VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_sect_);
- VCMReceiveStatisticsCallback* _receiveStatsCallback
- GUARDED_BY(process_crit_sect_);
- VCMDecoderTimingCallback* _decoderTimingCallback
- GUARDED_BY(process_crit_sect_);
- VCMPacketRequestCallback* _packetRequestCallback
- GUARDED_BY(process_crit_sect_);
- VCMRenderBufferSizeCallback* render_buffer_callback_
- GUARDED_BY(process_crit_sect_);
+ VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_);
+ VCMReceiveStatisticsCallback* _receiveStatsCallback GUARDED_BY(process_crit_);
+ VCMDecoderTimingCallback* _decoderTimingCallback GUARDED_BY(process_crit_);
+ VCMPacketRequestCallback* _packetRequestCallback GUARDED_BY(process_crit_);
VCMGenericDecoder* _decoder;
-#ifdef DEBUG_DECODER_BIT_STREAM
- FILE* _bitStreamBeforeDecoder;
-#endif
+
VCMFrameBuffer _frameFromFile;
- bool _scheduleKeyRequest GUARDED_BY(process_crit_sect_);
- bool drop_frames_until_keyframe_ GUARDED_BY(process_crit_sect_);
- size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
+ bool _scheduleKeyRequest GUARDED_BY(process_crit_);
+ bool drop_frames_until_keyframe_ GUARDED_BY(process_crit_);
+ size_t max_nack_list_size_ GUARDED_BY(process_crit_);
- VCMCodecDataBase _codecDataBase GUARDED_BY(_receiveCritSect);
- EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
+ VCMCodecDataBase _codecDataBase GUARDED_BY(receive_crit_);
+ EncodedImageCallback* pre_decode_image_callback_;
VCMProcessTimer _receiveStatsTimer;
VCMProcessTimer _retransmissionTimer;
VCMProcessTimer _keyRequestTimer;
QpParser qp_parser_;
+ ThreadUnsafeOneTimeEvent first_frame_received_;
};
} // namespace vcm
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc b/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
index 5aadcf91e0e..a832e2180b9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
@@ -20,18 +20,15 @@
#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/system_wrappers/include/clock.h"
-// #define DEBUG_DECODER_BIT_STREAM
-
namespace webrtc {
namespace vcm {
VideoReceiver::VideoReceiver(Clock* clock,
EventFactory* event_factory,
+ EncodedImageCallback* pre_decode_image_callback,
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender)
: clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- _receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_timing(clock_),
_receiver(&_timing,
clock_,
@@ -39,50 +36,36 @@ VideoReceiver::VideoReceiver(Clock* clock,
nack_sender,
keyframe_request_sender),
_decodedFrameCallback(&_timing, clock_),
- _frameTypeCallback(NULL),
- _receiveStatsCallback(NULL),
- _decoderTimingCallback(NULL),
- _packetRequestCallback(NULL),
- render_buffer_callback_(NULL),
- _decoder(NULL),
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder(NULL),
-#endif
+ _frameTypeCallback(nullptr),
+ _receiveStatsCallback(nullptr),
+ _decoderTimingCallback(nullptr),
+ _packetRequestCallback(nullptr),
+ _decoder(nullptr),
_frameFromFile(),
_scheduleKeyRequest(false),
drop_frames_until_keyframe_(false),
max_nack_list_size_(0),
_codecDataBase(nullptr, nullptr),
- pre_decode_image_callback_(NULL),
+ pre_decode_image_callback_(pre_decode_image_callback),
_receiveStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
- _keyRequestTimer(500, clock_) {
- assert(clock_);
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
-#endif
-}
+ _keyRequestTimer(500, clock_) {}
-VideoReceiver::~VideoReceiver() {
- delete _receiveCritSect;
-#ifdef DEBUG_DECODER_BIT_STREAM
- fclose(_bitStreamBeforeDecoder);
-#endif
-}
+VideoReceiver::~VideoReceiver() {}
void VideoReceiver::Process() {
// Receive-side statistics
if (_receiveStatsTimer.TimeUntilProcess() == 0) {
_receiveStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_receiveStatsCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_receiveStatsCallback != nullptr) {
uint32_t bitRate;
uint32_t frameRate;
_receiver.ReceiveStatistics(&bitRate, &frameRate);
_receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
}
- if (_decoderTimingCallback != NULL) {
+ if (_decoderTimingCallback != nullptr) {
int decode_ms;
int max_decode_ms;
int current_delay_ms;
@@ -97,12 +80,6 @@ void VideoReceiver::Process() {
decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
}
-
- // Size of render buffer.
- if (render_buffer_callback_) {
- int buffer_size_ms = _receiver.RenderBufferSizeMs();
- render_buffer_callback_->RenderBufferSizeMs(buffer_size_ms);
- }
}
// Key frame requests
@@ -110,8 +87,8 @@ void VideoReceiver::Process() {
_keyRequestTimer.Processed();
bool request_key_frame = false;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
- request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+ rtc::CritScope cs(&process_crit_);
+ request_key_frame = _scheduleKeyRequest && _frameTypeCallback != nullptr;
}
if (request_key_frame)
RequestKeyFrame();
@@ -129,9 +106,9 @@ void VideoReceiver::Process() {
bool callback_registered = false;
uint16_t length;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
length = max_nack_list_size_;
- callback_registered = _packetRequestCallback != NULL;
+ callback_registered = _packetRequestCallback != nullptr;
}
if (callback_registered && length > 0) {
// Collect sequence numbers from the default receiver.
@@ -142,8 +119,8 @@ void VideoReceiver::Process() {
ret = RequestKeyFrame();
}
if (ret == VCM_OK && !nackList.empty()) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_packetRequestCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_packetRequestCallback != nullptr) {
_packetRequestCallback->ResendPackets(&nackList[0], nackList.size());
}
}
@@ -168,7 +145,7 @@ int64_t VideoReceiver::TimeUntilNextProcess() {
}
int32_t VideoReceiver::SetReceiveChannelParameters(int64_t rtt) {
- CriticalSectionScoped receiveCs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_receiver.UpdateRtt(rtt);
return 0;
}
@@ -189,9 +166,11 @@ int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
}
case kProtectionNackFEC: {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
RTC_DCHECK(enable);
- _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
+ _receiver.SetNackMode(kNack,
+ media_optimization::kLowRttNackMs,
+ media_optimization::kMaxRttDelayThreshold);
_receiver.SetDecodeErrorMode(kNoErrors);
break;
}
@@ -210,14 +189,14 @@ int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
// ready for rendering.
int32_t VideoReceiver::RegisterReceiveCallback(
VCMReceiveCallback* receiveCallback) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
return VCM_OK;
}
int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
VCMReceiveStatisticsCallback* receiveStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_receiver.RegisterStatsCallback(receiveStats);
_receiveStatsCallback = receiveStats;
return VCM_OK;
@@ -225,7 +204,7 @@ int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
int32_t VideoReceiver::RegisterDecoderTimingCallback(
VCMDecoderTimingCallback* decoderTiming) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_decoderTimingCallback = decoderTiming;
return VCM_OK;
}
@@ -233,10 +212,10 @@ int32_t VideoReceiver::RegisterDecoderTimingCallback(
// Register an externally defined decoder object.
void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
uint8_t payloadType) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (externalDecoder == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (externalDecoder == nullptr) {
// Make sure the VCM updates the decoder next time it decodes.
- _decoder = NULL;
+ _decoder = nullptr;
RTC_CHECK(_codecDataBase.DeregisterExternalDecoder(payloadType));
return;
}
@@ -246,25 +225,18 @@ void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
// Register a frame type request callback.
int32_t VideoReceiver::RegisterFrameTypeCallback(
VCMFrameTypeCallback* frameTypeCallback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_frameTypeCallback = frameTypeCallback;
return VCM_OK;
}
int32_t VideoReceiver::RegisterPacketRequestCallback(
VCMPacketRequestCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_packetRequestCallback = callback;
return VCM_OK;
}
-int VideoReceiver::RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- render_buffer_callback_ = callback;
- return VCM_OK;
-}
-
void VideoReceiver::TriggerDecoderShutdown() {
_receiver.TriggerDecoderShutdown();
}
@@ -275,7 +247,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
int64_t nextRenderTimeMs;
bool prefer_late_decoding = false;
{
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
prefer_late_decoding = _codecDataBase.PrefersLateDecoding();
}
@@ -286,7 +258,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
return VCM_FRAME_NOT_READY;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
if (drop_frames_until_keyframe_) {
// Still getting delta frames, schedule another keyframe request as if
// decode failed.
@@ -298,11 +270,6 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
drop_frames_until_keyframe_ = false;
}
}
- CriticalSectionScoped cs(_receiveCritSect);
-
- // If this frame was too late, we should adjust the delay accordingly
- _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
- clock_->TimeInMilliseconds());
if (pre_decode_image_callback_) {
EncodedImage encoded_image(frame->EncodedImage());
@@ -311,18 +278,20 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
encoded_image.qp_ = qp;
}
pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
- NULL);
+ nullptr);
}
-#ifdef DEBUG_DECODER_BIT_STREAM
- if (_bitStreamBeforeDecoder != NULL) {
- // Write bit stream to file for debugging purposes
- if (fwrite(frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) !=
- frame->Length()) {
- return -1;
- }
+ rtc::CritScope cs(&receive_crit_);
+ // If this frame was too late, we should adjust the delay accordingly
+ _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
+ clock_->TimeInMilliseconds());
+
+ if (first_frame_received_()) {
+ LOG(LS_INFO) << "Received first "
+ << (frame->Complete() ? "complete" : "incomplete")
+ << " decodable video frame";
}
-#endif
+
const int32_t ret = Decode(*frame);
_receiver.ReleaseFrame(frame);
return ret;
@@ -331,8 +300,8 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
int32_t VideoReceiver::RequestSliceLossIndication(
const uint64_t pictureID) const {
TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID);
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_frameTypeCallback != nullptr) {
const int32_t ret =
_frameTypeCallback->SliceLossIndicationRequest(pictureID);
if (ret < 0) {
@@ -346,8 +315,8 @@ int32_t VideoReceiver::RequestSliceLossIndication(
int32_t VideoReceiver::RequestKeyFrame() {
TRACE_EVENT0("webrtc", "RequestKeyFrame");
- CriticalSectionScoped process_cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_frameTypeCallback != nullptr) {
const int32_t ret = _frameTypeCallback->RequestKeyFrame();
if (ret < 0) {
return ret;
@@ -365,7 +334,7 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
"type", frame.FrameType());
// Change decoder if payload type has changed
_decoder = _codecDataBase.GetDecoder(frame, &_decodedFrameCallback);
- if (_decoder == NULL) {
+ if (_decoder == nullptr) {
return VCM_NO_CODEC_REGISTERED;
}
// Decode a frame
@@ -389,7 +358,7 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
ret = VCM_OK;
}
if (request_key_frame) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_scheduleKeyRequest = true;
}
TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
@@ -400,8 +369,8 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
int32_t numberOfCores,
bool requireKeyFrame) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (receiveCodec == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (receiveCodec == nullptr) {
return VCM_PARAMETER_ERROR;
}
if (!_codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores,
@@ -413,8 +382,8 @@ int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
// Get current received codec
int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
- CriticalSectionScoped cs(_receiveCritSect);
- if (currentReceiveCodec == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (currentReceiveCodec == nullptr) {
return VCM_PARAMETER_ERROR;
}
return _codecDataBase.ReceiveCodec(currentReceiveCodec) ? 0 : -1;
@@ -422,7 +391,7 @@ int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
// Get current received codec
VideoCodecType VideoReceiver::ReceiveCodec() const {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
return _codecDataBase.ReceiveCodec();
}
@@ -434,7 +403,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
TRACE_EVENT1("webrtc", "VCM::PacketKeyFrame", "seqnum",
rtpInfo.header.sequenceNumber);
}
- if (incomingPayload == NULL) {
+ if (incomingPayload == nullptr) {
// The jitter buffer doesn't handle non-zero payload lengths for packets
// without payload.
// TODO(holmer): We should fix this in the jitter buffer.
@@ -443,11 +412,12 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
int32_t ret = _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
rtpInfo.type.Video.height);
+
// TODO(holmer): Investigate if this somehow should use the key frame
// request scheduling to throttle the requests.
if (ret == VCM_FLUSH_INDICATOR) {
{
- CriticalSectionScoped process_cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
drop_frames_until_keyframe_ = true;
}
RequestKeyFrame();
@@ -484,7 +454,7 @@ uint32_t VideoReceiver::DiscardedPackets() const {
int VideoReceiver::SetReceiverRobustnessMode(
ReceiverRobustness robustnessMode,
VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
switch (robustnessMode) {
case VideoCodingModule::kNone:
_receiver.SetNackMode(kNoNack, -1, -1);
@@ -520,7 +490,7 @@ int VideoReceiver::SetReceiverRobustnessMode(
}
void VideoReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_receiver.SetDecodeErrorMode(decode_error_mode);
}
@@ -528,7 +498,7 @@ void VideoReceiver::SetNackSettings(size_t max_nack_list_size,
int max_packet_age_to_nack,
int max_incomplete_time_ms) {
if (max_nack_list_size != 0) {
- CriticalSectionScoped process_cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
max_nack_list_size_ = max_nack_list_size;
}
_receiver.SetNackSettings(max_nack_list_size, max_packet_age_to_nack,
@@ -539,11 +509,5 @@ int VideoReceiver::SetMinReceiverDelay(int desired_delay_ms) {
return _receiver.SetMinReceiverDelay(desired_delay_ms);
}
-void VideoReceiver::RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) {
- CriticalSectionScoped cs(_receiveCritSect);
- pre_decode_image_callback_ = observer;
-}
-
} // namespace vcm
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
index 05656a5c254..3c414053bc2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
@@ -33,7 +33,7 @@ class TestVideoReceiver : public ::testing::Test {
TestVideoReceiver() : clock_(0) {}
virtual void SetUp() {
- receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
+ receiver_.reset(new VideoReceiver(&clock_, &event_factory_, nullptr));
receiver_->RegisterExternalDecoder(&decoder_, kUnusedPayloadType);
const size_t kMaxNackListSize = 250;
const int kMaxPacketAgeToNack = 450;
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_sender.cc b/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
index 49690697889..f52b1c56e7b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
@@ -27,27 +27,24 @@ namespace vcm {
VideoSender::VideoSender(Clock* clock,
EncodedImageCallback* post_encode_callback,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback)
+ VCMSendStatisticsCallback* send_stats_callback)
: clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(nullptr),
- _encodedFrameCallback(post_encode_callback),
_mediaOpt(clock_),
- _sendStatsCallback(nullptr),
+ _encodedFrameCallback(post_encode_callback, &_mediaOpt),
+ send_stats_callback_(send_stats_callback),
_codecDataBase(encoder_rate_observer, &_encodedFrameCallback),
frame_dropper_enabled_(true),
_sendStatsTimer(1000, clock_),
current_codec_(),
- qm_settings_callback_(qm_settings_callback),
protection_callback_(nullptr),
encoder_params_({0, 0, 0, 0}),
encoder_has_internal_source_(false),
next_frame_types_(1, kVideoFrameDelta) {
+ _mediaOpt.Reset();
// Allow VideoSender to be created on one thread but used on another, post
// construction. This is currently how this class is being used by at least
// one external project (diffractor).
- _mediaOpt.EnableQM(qm_settings_callback_ != nullptr);
- _mediaOpt.Reset();
main_thread_.DetachFromThread();
}
@@ -55,12 +52,19 @@ VideoSender::~VideoSender() {}
void VideoSender::Process() {
if (_sendStatsTimer.TimeUntilProcess() == 0) {
+ // |_sendStatsTimer.Processed()| must be called. Otherwise
+ // VideoSender::Process() will be called in an infinite loop.
_sendStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_sendStatsCallback != nullptr) {
+ if (send_stats_callback_) {
uint32_t bitRate = _mediaOpt.SentBitRate();
uint32_t frameRate = _mediaOpt.SentFrameRate();
- _sendStatsCallback->SendStatistics(bitRate, frameRate);
+ std::string encoder_name;
+ {
+ rtc::CritScope cs(&params_crit_);
+ // Copy the string here so that we don't hold |params_crit_| in the CB.
+ encoder_name = encoder_name_;
+ }
+ send_stats_callback_->SendStatistics(bitRate, frameRate, encoder_name);
}
}
@@ -196,19 +200,38 @@ int VideoSender::FrameRate(unsigned int* framerate) const {
int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
uint8_t lossRate,
int64_t rtt) {
- uint32_t target_rate =
- _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
- protection_callback_, qm_settings_callback_);
+ uint32_t target_rate = _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
+ protection_callback_);
uint32_t input_frame_rate = _mediaOpt.InputFrameRate();
- rtc::CritScope cs(&params_crit_);
- encoder_params_ = {target_rate, lossRate, rtt, input_frame_rate};
+ EncoderParameters encoder_params = {target_rate, lossRate, rtt,
+ input_frame_rate};
+ bool encoder_has_internal_source;
+ {
+ rtc::CritScope cs(&params_crit_);
+ encoder_params_ = encoder_params;
+ encoder_has_internal_source = encoder_has_internal_source_;
+ }
+
+ // For encoders with internal sources, we need to tell the encoder directly,
+ // instead of waiting for an AddVideoFrame that will never come (internal
+ // source encoders don't get input frames).
+ if (encoder_has_internal_source) {
+ rtc::CritScope cs(&encoder_crit_);
+ if (_encoder) {
+ SetEncoderParameters(encoder_params);
+ }
+ }
return VCM_OK;
}
void VideoSender::SetEncoderParameters(EncoderParameters params) {
+ // |target_bitrate == 0 | means that the network is down or the send pacer is
+ // full.
+ // TODO(perkj): Consider setting |target_bitrate| == 0 to the encoders.
+ // Especially if |encoder_has_internal_source_ | == true.
if (params.target_bitrate == 0)
return;
@@ -220,24 +243,6 @@ void VideoSender::SetEncoderParameters(EncoderParameters params) {
_encoder->SetEncoderParameters(params);
}
-int32_t VideoSender::RegisterTransportCallback(
- VCMPacketizationCallback* transport) {
- rtc::CritScope lock(&encoder_crit_);
- _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
- _encodedFrameCallback.SetTransportCallback(transport);
- return VCM_OK;
-}
-
-// Register video output information callback which will be called to deliver
-// information about the video stream produced by the encoder, for instance the
-// average frame rate and bit rate.
-int32_t VideoSender::RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _sendStatsCallback = sendStats;
- return VCM_OK;
-}
-
// Register a video protection callback which will be called to deliver the
// requested FEC rate and NACK status (on/off).
// Note: this callback is assumed to only be registered once and before it is
@@ -269,7 +274,6 @@ void VideoSender::SetVideoProtection(VCMVideoProtection videoProtection) {
}
// Add one raw video frame to the encoder, blocking.
int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) {
EncoderParameters encoder_params;
std::vector<FrameType> next_frame_types;
@@ -291,7 +295,6 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
_encoder->OnDroppedFrame();
return VCM_OK;
}
- _mediaOpt.UpdateContentData(contentMetrics);
// TODO(pbos): Make sure setting send codec is synchronized with video
// processing so frame size always matches.
if (!_codecDataBase.MatchesCurrentResolution(videoFrame.width(),
@@ -300,7 +303,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
return VCM_PARAMETER_ERROR;
}
VideoFrame converted_frame = videoFrame;
- if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) {
+ if (converted_frame.video_frame_buffer()->native_handle() &&
+ !_encoder->SupportsNativeHandle()) {
// This module only supports software encoding.
// TODO(pbos): Offload conversion from the encoder thread.
converted_frame = converted_frame.ConvertNativeToI420Frame();
@@ -313,9 +317,12 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
return ret;
}
+
{
- // Change all keyframe requests to encode delta frames the next time.
rtc::CritScope lock(&params_crit_);
+ encoder_name_ = _encoder->ImplementationName();
+
+ // Change all keyframe requests to encode delta frames the next time.
for (size_t i = 0; i < next_frame_types_.size(); ++i) {
// Check for equality (same requested as before encoding) to not
// accidentally drop a keyframe request while encoding.
@@ -323,16 +330,13 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
next_frame_types_[i] = kVideoFrameDelta;
}
}
- if (qm_settings_callback_)
- qm_settings_callback_->SetTargetFramerate(_encoder->GetTargetFramerate());
return VCM_OK;
}
-int32_t VideoSender::IntraFrameRequest(int stream_index) {
+int32_t VideoSender::IntraFrameRequest(size_t stream_index) {
{
rtc::CritScope lock(&params_crit_);
- if (stream_index < 0 ||
- static_cast<size_t>(stream_index) >= next_frame_types_.size()) {
+ if (stream_index >= next_frame_types_.size()) {
return -1;
}
next_frame_types_[stream_index] = kVideoFrameKey;
@@ -346,7 +350,7 @@ int32_t VideoSender::IntraFrameRequest(int stream_index) {
// encoder_crit_.
rtc::CritScope lock(&encoder_crit_);
rtc::CritScope params_lock(&params_crit_);
- if (static_cast<size_t>(stream_index) >= next_frame_types_.size())
+ if (stream_index >= next_frame_types_.size())
return -1;
if (_encoder != nullptr && _encoder->InternalSource()) {
// Try to request the frame if we have an external encoder with
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
index 3f9ba4eadda..5324ceeb0b8 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
@@ -86,19 +86,19 @@ class EmptyFrameGenerator : public FrameGenerator {
std::unique_ptr<VideoFrame> frame_;
};
-class PacketizationCallback : public VCMPacketizationCallback {
+class EncodedImageCallbackImpl : public EncodedImageCallback {
public:
- explicit PacketizationCallback(Clock* clock)
+ explicit EncodedImageCallbackImpl(Clock* clock)
: clock_(clock), start_time_ms_(clock_->TimeInMilliseconds()) {}
- virtual ~PacketizationCallback() {}
+ virtual ~EncodedImageCallbackImpl() {}
- int32_t SendData(uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation_header,
- const RTPVideoHeader* rtp_video_header) override {
- assert(rtp_video_header);
- frame_data_.push_back(FrameData(encoded_image._length, *rtp_video_header));
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override {
+ assert(codec_specific_info);
+ frame_data_.push_back(
+ FrameData(encoded_image._length, *codec_specific_info));
return 0;
}
@@ -130,11 +130,12 @@ class PacketizationCallback : public VCMPacketizationCallback {
struct FrameData {
FrameData() {}
- FrameData(size_t payload_size, const RTPVideoHeader& rtp_video_header)
- : payload_size(payload_size), rtp_video_header(rtp_video_header) {}
+ FrameData(size_t payload_size, const CodecSpecificInfo& codec_specific_info)
+ : payload_size(payload_size),
+ codec_specific_info(codec_specific_info) {}
size_t payload_size;
- RTPVideoHeader rtp_video_header;
+ CodecSpecificInfo codec_specific_info;
};
int64_t interval_ms() {
@@ -146,9 +147,9 @@ class PacketizationCallback : public VCMPacketizationCallback {
int CountFramesWithinTemporalLayer(int temporal_layer) {
int frames = 0;
for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ EXPECT_EQ(kVideoCodecVP8, frame_data_[i].codec_specific_info.codecType);
const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ frame_data_[i].codec_specific_info.codecSpecific.VP8.temporalIdx;
if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
frames++;
}
@@ -158,9 +159,9 @@ class PacketizationCallback : public VCMPacketizationCallback {
size_t SumPayloadBytesWithinTemporalLayer(int temporal_layer) {
size_t payload_size = 0;
for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ EXPECT_EQ(kVideoCodecVP8, frame_data_[i].codec_specific_info.codecType);
const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ frame_data_[i].codec_specific_info.codecSpecific.VP8.temporalIdx;
if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
payload_size += frame_data_[i].payload_size;
}
@@ -176,22 +177,20 @@ class TestVideoSender : public ::testing::Test {
protected:
// Note: simulated clock starts at 1 seconds, since parts of webrtc use 0 as
// a special case (e.g. frame rate in media optimization).
- TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
+ TestVideoSender() : clock_(1000), encoded_frame_callback_(&clock_) {}
void SetUp() override {
sender_.reset(
- new VideoSender(&clock_, &post_encode_callback_, nullptr, nullptr));
- EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
+ new VideoSender(&clock_, &encoded_frame_callback_, nullptr, nullptr));
}
void AddFrame() {
assert(generator_.get());
- sender_->AddVideoFrame(*generator_->NextFrame(), NULL, NULL);
+ sender_->AddVideoFrame(*generator_->NextFrame(), NULL);
}
SimulatedClock clock_;
- PacketizationCallback packetization_callback_;
- MockEncodedImageCallback post_encode_callback_;
+ EncodedImageCallbackImpl encoded_frame_callback_;
// Used by subclassing tests, need to outlive sender_.
std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoSender> sender_;
@@ -291,9 +290,17 @@ TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequests) {
EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
ExpectIntraRequest(-1);
AddFrame();
+}
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
- ExpectIntraRequest(-1);
+TEST_F(TestVideoSenderWithMockEncoder, TestSetRate) {
+ const uint32_t new_bitrate = settings_.startBitrate + 300;
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(1).WillOnce(Return(0));
+ sender_->SetChannelParameters(new_bitrate * 1000, 0, 200);
+ AddFrame();
+
+ // Expect no call to encoder_.SetRates if the new bitrate is zero.
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(0);
+ sender_->SetChannelParameters(0, 0, 200);
AddFrame();
}
@@ -314,7 +321,19 @@ TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequestsInternalCapture) {
EXPECT_EQ(0, sender_->IntraFrameRequest(2));
// No requests expected since these indices are out of bounds.
EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
+}
+
+TEST_F(TestVideoSenderWithMockEncoder, TestEncoderParametersForInternalSource) {
+ // De-register current external encoder.
+ sender_->RegisterExternalEncoder(nullptr, kUnusedPayloadType, false);
+ // Register encoder with internal capture.
+ sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, true);
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
+ // Update encoder bitrate parameters. We expect that to immediately call
+ // SetRates on the encoder without waiting for AddFrame processing.
+ const uint32_t new_bitrate = settings_.startBitrate + 300;
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(1).WillOnce(Return(0));
+ sender_->SetChannelParameters(new_bitrate * 1000, 0, 200);
}
TEST_F(TestVideoSenderWithMockEncoder, EncoderFramerateUpdatedViaProcess) {
@@ -402,8 +421,6 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
void InsertFrames(float framerate, float seconds) {
for (int i = 0; i < seconds * framerate; ++i) {
clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
- EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
- .WillOnce(Return(0));
AddFrame();
// SetChannelParameters needs to be called frequently to propagate
// framerate from the media optimization into the encoder.
@@ -422,10 +439,10 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
// It appears that this 5 seconds simulation is needed to allow
// bitrate and framerate to stabilize.
InsertFrames(framerate, short_simulation_interval);
- packetization_callback_.Reset();
+ encoded_frame_callback_.Reset();
InsertFrames(framerate, long_simulation_interval);
- return packetization_callback_.CalculateVp8StreamInfo();
+ return encoded_frame_callback_.CalculateVp8StreamInfo();
}
protected:
diff --git a/chromium/third_party/webrtc/modules/video_processing/BUILD.gn b/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
index 43a8de12557..1177d9b7f0d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
@@ -13,12 +13,6 @@ build_video_processing_sse2 = current_cpu == "x86" || current_cpu == "x64"
source_set("video_processing") {
sources = [
- "brightness_detection.cc",
- "brightness_detection.h",
- "content_analysis.cc",
- "content_analysis.h",
- "deflickering.cc",
- "deflickering.h",
"frame_preprocessor.cc",
"frame_preprocessor.h",
"include/video_processing.h",
@@ -67,7 +61,6 @@ source_set("video_processing") {
if (build_video_processing_sse2) {
source_set("video_processing_sse2") {
sources = [
- "content_analysis_sse2.cc",
"util/denoiser_filter_sse2.cc",
"util/denoiser_filter_sse2.h",
]
diff --git a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc b/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc
deleted file mode 100644
index 7455cf97591..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/brightness_detection.h"
-
-#include <math.h>
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-
-namespace webrtc {
-
-VPMBrightnessDetection::VPMBrightnessDetection() {
- Reset();
-}
-
-VPMBrightnessDetection::~VPMBrightnessDetection() {}
-
-void VPMBrightnessDetection::Reset() {
- frame_cnt_bright_ = 0;
- frame_cnt_dark_ = 0;
-}
-
-int32_t VPMBrightnessDetection::ProcessFrame(
- const VideoFrame& frame,
- const VideoProcessing::FrameStats& stats) {
- if (frame.IsZeroSize()) {
- return VPM_PARAMETER_ERROR;
- }
- int width = frame.width();
- int height = frame.height();
-
- if (!VideoProcessing::ValidFrameStats(stats)) {
- return VPM_PARAMETER_ERROR;
- }
-
- const uint8_t frame_cnt_alarm = 2;
-
- // Get proportion in lowest bins.
- uint8_t low_th = 20;
- float prop_low = 0;
- for (uint32_t i = 0; i < low_th; i++) {
- prop_low += stats.hist[i];
- }
- prop_low /= stats.num_pixels;
-
- // Get proportion in highest bins.
- unsigned char high_th = 230;
- float prop_high = 0;
- for (uint32_t i = high_th; i < 256; i++) {
- prop_high += stats.hist[i];
- }
- prop_high /= stats.num_pixels;
-
- if (prop_high < 0.4) {
- if (stats.mean < 90 || stats.mean > 170) {
- // Standard deviation of Y
- const uint8_t* buffer = frame.buffer(kYPlane);
- float std_y = 0;
- for (int h = 0; h < height; h += (1 << stats.sub_sampling_factor)) {
- int row = h * width;
- for (int w = 0; w < width; w += (1 << stats.sub_sampling_factor)) {
- std_y +=
- (buffer[w + row] - stats.mean) * (buffer[w + row] - stats.mean);
- }
- }
- std_y = sqrt(std_y / stats.num_pixels);
-
- // Get percentiles.
- uint32_t sum = 0;
- uint32_t median_y = 140;
- uint32_t perc05 = 0;
- uint32_t perc95 = 255;
- float pos_perc05 = stats.num_pixels * 0.05f;
- float pos_median = stats.num_pixels * 0.5f;
- float posPerc95 = stats.num_pixels * 0.95f;
- for (uint32_t i = 0; i < 256; i++) {
- sum += stats.hist[i];
- if (sum < pos_perc05)
- perc05 = i; // 5th perc.
- if (sum < pos_median)
- median_y = i; // 50th perc.
- if (sum < posPerc95)
- perc95 = i; // 95th perc.
- else
- break;
- }
-
- // Check if image is too dark
- if ((std_y < 55) && (perc05 < 50)) {
- if (median_y < 60 || stats.mean < 80 || perc95 < 130 ||
- prop_low > 0.20) {
- frame_cnt_dark_++;
- } else {
- frame_cnt_dark_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- }
-
- // Check if image is too bright
- if ((std_y < 52) && (perc95 > 200) && (median_y > 160)) {
- if (median_y > 185 || stats.mean > 185 || perc05 > 140 ||
- prop_high > 0.25) {
- frame_cnt_bright_++;
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_++;
- frame_cnt_dark_ = 0;
- }
-
- if (frame_cnt_dark_ > frame_cnt_alarm) {
- return VideoProcessing::kDarkWarning;
- } else if (frame_cnt_bright_ > frame_cnt_alarm) {
- return VideoProcessing::kBrightWarning;
- } else {
- return VideoProcessing::kNoWarning;
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h b/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h
deleted file mode 100644
index 78a7ac5e0bf..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMBrightnessDetection {
- public:
- VPMBrightnessDetection();
- ~VPMBrightnessDetection();
-
- void Reset();
- int32_t ProcessFrame(const VideoFrame& frame,
- const VideoProcessing::FrameStats& stats);
-
- private:
- uint32_t frame_cnt_bright_;
- uint32_t frame_cnt_dark_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc b/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc
deleted file mode 100644
index 54c04da4668..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/video_processing/content_analysis.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-namespace webrtc {
-
-VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection)
- : orig_frame_(NULL),
- prev_frame_(NULL),
- width_(0),
- height_(0),
- skip_num_(1),
- border_(8),
- motion_magnitude_(0.0f),
- spatial_pred_err_(0.0f),
- spatial_pred_err_h_(0.0f),
- spatial_pred_err_v_(0.0f),
- first_frame_(true),
- ca_Init_(false),
- content_metrics_(NULL) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
-
- if (runtime_cpu_detection) {
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- if (WebRtc_GetCPUInfo(kSSE2)) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
- }
-#endif
- }
- Release();
-}
-
-VPMContentAnalysis::~VPMContentAnalysis() {
- Release();
-}
-
-VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics(
- const VideoFrame& inputFrame) {
- if (inputFrame.IsZeroSize())
- return NULL;
-
- // Init if needed (native dimension change).
- if (width_ != inputFrame.width() || height_ != inputFrame.height()) {
- if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
- return NULL;
- }
- // Only interested in the Y plane.
- orig_frame_ = inputFrame.buffer(kYPlane);
-
- // Compute spatial metrics: 3 spatial prediction errors.
- (this->*ComputeSpatialMetrics)();
-
- // Compute motion metrics
- if (first_frame_ == false)
- ComputeMotionMetrics();
-
- // Saving current frame as previous one: Y only.
- memcpy(prev_frame_, orig_frame_, width_ * height_);
-
- first_frame_ = false;
- ca_Init_ = true;
-
- return ContentMetrics();
-}
-
-int32_t VPMContentAnalysis::Release() {
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- content_metrics_ = NULL;
- }
-
- if (prev_frame_ != NULL) {
- delete[] prev_frame_;
- prev_frame_ = NULL;
- }
-
- width_ = 0;
- height_ = 0;
- first_frame_ = true;
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::Initialize(int width, int height) {
- width_ = width;
- height_ = height;
- first_frame_ = true;
-
- // skip parameter: # of skipped rows: for complexity reduction
- // temporal also currently uses it for column reduction.
- skip_num_ = 1;
-
- // use skipNum = 2 for 4CIF, WHD
- if ((height_ >= 576) && (width_ >= 704)) {
- skip_num_ = 2;
- }
- // use skipNum = 4 for FULLL_HD images
- if ((height_ >= 1080) && (width_ >= 1920)) {
- skip_num_ = 4;
- }
-
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- }
-
- if (prev_frame_ != NULL) {
- delete[] prev_frame_;
- }
-
- // Spatial Metrics don't work on a border of 8. Minimum processing
- // block size is 16 pixels. So make sure the width and height support this.
- if (width_ <= 32 || height_ <= 32) {
- ca_Init_ = false;
- return VPM_PARAMETER_ERROR;
- }
-
- content_metrics_ = new VideoContentMetrics();
- if (content_metrics_ == NULL) {
- return VPM_MEMORY;
- }
-
- prev_frame_ = new uint8_t[width_ * height_]; // Y only.
- if (prev_frame_ == NULL)
- return VPM_MEMORY;
-
- return VPM_OK;
-}
-
-// Compute motion metrics: magnitude over non-zero motion vectors,
-// and size of zero cluster
-int32_t VPMContentAnalysis::ComputeMotionMetrics() {
- // Motion metrics: only one is derived from normalized
- // (MAD) temporal difference
- (this->*TemporalDiffMetric)();
- return VPM_OK;
-}
-
-// Normalized temporal difference (MAD): used as a motion level metric
-// Normalize MAD by spatial contrast: images with more contrast
-// (pixel variance) likely have larger temporal difference
-// To reduce complexity, we compute the metric for a reduced set of points.
-int32_t VPMContentAnalysis::TemporalDiffMetric_C() {
- // size of original frame
- int sizei = height_;
- int sizej = width_;
- uint32_t tempDiffSum = 0;
- uint32_t pixelSum = 0;
- uint64_t pixelSqSum = 0;
-
- uint32_t num_pixels = 0; // Counter for # of pixels.
- const int width_end = ((width_ - 2 * border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- num_pixels += 1;
- int ssn = i * sizej + j;
-
- uint8_t currPixel = orig_frame_[ssn];
- uint8_t prevPixel = prev_frame_[ssn];
-
- tempDiffSum +=
- static_cast<uint32_t>(abs((int16_t)(currPixel - prevPixel)));
- pixelSum += static_cast<uint32_t>(currPixel);
- pixelSqSum += static_cast<uint64_t>(currPixel * currPixel);
- }
- }
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0)
- return VPM_OK;
-
- // Normalize over all pixels.
- float const tempDiffAvg =
- static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
- float const pixelSumAvg =
- static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
- float const pixelSqSumAvg =
- static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg / contrast;
- }
- return VPM_OK;
-}
-
-// Compute spatial metrics:
-// To reduce complexity, we compute the metric for a reduced set of points.
-// The spatial metrics are rough estimates of the prediction error cost for
-// each QM spatial mode: 2x2,1x2,2x1
-// The metrics are a simple estimate of the up-sampling prediction error,
-// estimated assuming sub-sampling for decimation (no filtering),
-// and up-sampling back up with simple bilinear interpolation.
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_C() {
- const int sizei = height_;
- const int sizej = width_;
-
- // Pixel mean square average: used to normalize the spatial metrics.
- uint32_t pixelMSA = 0;
-
- uint32_t spatialErrSum = 0;
- uint32_t spatialErrVSum = 0;
- uint32_t spatialErrHSum = 0;
-
- // make sure work section is a multiple of 16
- const int width_end = ((sizej - 2 * border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- int ssn1 = i * sizej + j;
- int ssn2 = (i + 1) * sizej + j; // bottom
- int ssn3 = (i - 1) * sizej + j; // top
- int ssn4 = i * sizej + j + 1; // right
- int ssn5 = i * sizej + j - 1; // left
-
- uint16_t refPixel1 = orig_frame_[ssn1] << 1;
- uint16_t refPixel2 = orig_frame_[ssn1] << 2;
-
- uint8_t bottPixel = orig_frame_[ssn2];
- uint8_t topPixel = orig_frame_[ssn3];
- uint8_t rightPixel = orig_frame_[ssn4];
- uint8_t leftPixel = orig_frame_[ssn5];
-
- spatialErrSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel2 - static_cast<uint16_t>(bottPixel + topPixel + leftPixel +
- rightPixel))));
- spatialErrVSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel1 - static_cast<uint16_t>(bottPixel + topPixel))));
- spatialErrHSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel1 - static_cast<uint16_t>(leftPixel + rightPixel))));
- pixelMSA += orig_frame_[ssn1];
- }
- }
-
- // Normalize over all pixels.
- const float spatialErr = static_cast<float>(spatialErrSum >> 2);
- const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
- const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
- const float norm = static_cast<float>(pixelMSA);
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
- return VPM_OK;
-}
-
-VideoContentMetrics* VPMContentAnalysis::ContentMetrics() {
- if (ca_Init_ == false)
- return NULL;
-
- content_metrics_->spatial_pred_err = spatial_pred_err_;
- content_metrics_->spatial_pred_err_h = spatial_pred_err_h_;
- content_metrics_->spatial_pred_err_v = spatial_pred_err_v_;
- // Motion metric: normalized temporal difference (MAD).
- content_metrics_->motion_magnitude = motion_magnitude_;
-
- return content_metrics_;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis.h b/chromium/third_party/webrtc/modules/video_processing/content_analysis.h
deleted file mode 100644
index d3a11bd091d..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_processing/include/video_processing_defines.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-
-class VPMContentAnalysis {
- public:
- // When |runtime_cpu_detection| is true, runtime selection of an optimized
- // code path is allowed.
- explicit VPMContentAnalysis(bool runtime_cpu_detection);
- ~VPMContentAnalysis();
-
- // Initialize ContentAnalysis - should be called prior to
- // extractContentFeature
- // Inputs: width, height
- // Return value: 0 if OK, negative value upon error
- int32_t Initialize(int width, int height);
-
- // Extract content Feature - main function of ContentAnalysis
- // Input: new frame
- // Return value: pointer to structure containing content Analysis
- // metrics or NULL value upon error
- VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
-
- // Release all allocated memory
- // Output: 0 if OK, negative value upon error
- int32_t Release();
-
- private:
- // return motion metrics
- VideoContentMetrics* ContentMetrics();
-
- // Normalized temporal difference metric: for motion magnitude
- typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)();
- TemporalDiffMetricFunc TemporalDiffMetric;
- int32_t TemporalDiffMetric_C();
-
- // Motion metric method: call 2 metrics (magnitude and size)
- int32_t ComputeMotionMetrics();
-
- // Spatial metric method: computes the 3 frame-average spatial
- // prediction errors (1x2,2x1,2x2)
- typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
- ComputeSpatialMetricsFunc ComputeSpatialMetrics;
- int32_t ComputeSpatialMetrics_C();
-
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- int32_t ComputeSpatialMetrics_SSE2();
- int32_t TemporalDiffMetric_SSE2();
-#endif
-
- const uint8_t* orig_frame_;
- uint8_t* prev_frame_;
- int width_;
- int height_;
- int skip_num_;
- int border_;
-
- // Content Metrics: Stores the local average of the metrics.
- float motion_magnitude_; // motion class
- float spatial_pred_err_; // spatial class
- float spatial_pred_err_h_; // spatial class
- float spatial_pred_err_v_; // spatial class
- bool first_frame_;
- bool ca_Init_;
-
- VideoContentMetrics* content_metrics_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc b/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc
deleted file mode 100644
index 7a60a89b454..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/content_analysis.h"
-
-#include <emmintrin.h>
-#include <math.h>
-
-namespace webrtc {
-
-int32_t VPMContentAnalysis::TemporalDiffMetric_SSE2() {
- uint32_t num_pixels = 0; // counter for # of pixels
- const uint8_t* imgBufO = orig_frame_ + border_ * width_ + border_;
- const uint8_t* imgBufP = prev_frame_ + border_ * width_ + border_;
-
- const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
-
- __m128i sad_64 = _mm_setzero_si128();
- __m128i sum_64 = _mm_setzero_si128();
- __m128i sqsum_64 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- for (uint16_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
- __m128i sqsum_32 = _mm_setzero_si128();
-
- const uint8_t* lineO = imgBufO;
- const uint8_t* lineP = imgBufP;
-
- // Work on 16 pixels at a time. For HD content with a width of 1920
- // this loop will run ~67 times (depending on border). Maximum for
- // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit
- // results which are then accumulated. There is no chance of
- // rollover for these two accumulators.
- // o*o will have a maximum of 255*255 = 65025. This will roll over
- // a 16 bit accumulator as 67*65025 > 65535, but will fit in a
- // 32 bit accumulator.
- for (uint16_t j = 0; j < width_end - border_; j += 16) {
- const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
- const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
-
- lineO += 16;
- lineP += 16;
-
- // Abs pixel difference between frames.
- sad_64 = _mm_add_epi64(sad_64, _mm_sad_epu8(o, p));
-
- // sum of all pixels in frame
- sum_64 = _mm_add_epi64(sum_64, _mm_sad_epu8(o, z));
-
- // Squared sum of all pixels in frame.
- const __m128i olo = _mm_unpacklo_epi8(o, z);
- const __m128i ohi = _mm_unpackhi_epi8(o, z);
-
- const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
- const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
-
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
- }
-
- // Add to 64 bit running sum as to not roll over.
- sqsum_64 =
- _mm_add_epi64(sqsum_64, _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32, z),
- _mm_unpacklo_epi32(sqsum_32, z)));
-
- imgBufO += width_ * skip_num_;
- imgBufP += width_ * skip_num_;
- num_pixels += (width_end - border_);
- }
-
- __m128i sad_final_128;
- __m128i sum_final_128;
- __m128i sqsum_final_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128(&sad_final_128, sad_64);
- _mm_store_si128(&sum_final_128, sum_64);
- _mm_store_si128(&sqsum_final_128, sqsum_64);
-
- uint64_t* sad_final_64 = reinterpret_cast<uint64_t*>(&sad_final_128);
- uint64_t* sum_final_64 = reinterpret_cast<uint64_t*>(&sum_final_128);
- uint64_t* sqsum_final_64 = reinterpret_cast<uint64_t*>(&sqsum_final_128);
-
- const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1];
- const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
- const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1];
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0)
- return VPM_OK;
-
- // Normalize over all pixels.
- const float tempDiffAvg =
- static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
- const float pixelSumAvg =
- static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
- const float pixelSqSumAvg =
- static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg / contrast;
- }
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_SSE2() {
- const uint8_t* imgBuf = orig_frame_ + border_ * width_;
- const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
-
- __m128i se_32 = _mm_setzero_si128();
- __m128i sev_32 = _mm_setzero_si128();
- __m128i seh_32 = _mm_setzero_si128();
- __m128i msa_32 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- // Error is accumulated as a 32 bit value. Looking at HD content with a
- // height of 1080 lines, or about 67 macro blocks. If the 16 bit row
- // value is maxed out at 65529 for every row, 65529*1080 = 70777800, which
- // will not roll over a 32 bit accumulator.
- // skip_num_ is also used to reduce the number of rows
- for (int32_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
- __m128i se_16 = _mm_setzero_si128();
- __m128i sev_16 = _mm_setzero_si128();
- __m128i seh_16 = _mm_setzero_si128();
- __m128i msa_16 = _mm_setzero_si128();
-
- // Row error is accumulated as a 16 bit value. There are 8
- // accumulators. Max value of a 16 bit number is 65529. Looking
- // at HD content, 1080p, has a width of 1920, 120 macro blocks.
- // A mb at a time is processed at a time. Absolute max error at
- // a point would be abs(0-255+255+255+255) which equals 1020.
- // 120*1020 = 122400. The probability of hitting this is quite low
- // on well behaved content. A specially crafted image could roll over.
- // border_ could also be adjusted to concentrate on just the center of
- // the images for an HD capture in order to reduce the possiblity of
- // rollover.
- const uint8_t* lineTop = imgBuf - width_ + border_;
- const uint8_t* lineCen = imgBuf + border_;
- const uint8_t* lineBot = imgBuf + width_ + border_;
-
- for (int32_t j = 0; j < width_end - border_; j += 16) {
- const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
- const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
- const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
- const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
- const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
-
- lineTop += 16;
- lineCen += 16;
- lineBot += 16;
-
- // center pixel unpacked
- __m128i clo = _mm_unpacklo_epi8(c, z);
- __m128i chi = _mm_unpackhi_epi8(c, z);
-
- // left right pixels unpacked and added together
- const __m128i lrlo =
- _mm_add_epi16(_mm_unpacklo_epi8(l, z), _mm_unpacklo_epi8(r, z));
- const __m128i lrhi =
- _mm_add_epi16(_mm_unpackhi_epi8(l, z), _mm_unpackhi_epi8(r, z));
-
- // top & bottom pixels unpacked and added together
- const __m128i tblo =
- _mm_add_epi16(_mm_unpacklo_epi8(t, z), _mm_unpacklo_epi8(b, z));
- const __m128i tbhi =
- _mm_add_epi16(_mm_unpackhi_epi8(t, z), _mm_unpackhi_epi8(b, z));
-
- // running sum of all pixels
- msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
- const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
- const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
- const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i setlo = _mm_subs_epi16(clo, _mm_add_epi16(lrlo, tblo));
- const __m128i sethi = _mm_subs_epi16(chi, _mm_add_epi16(lrhi, tbhi));
-
- // Add to 16 bit running sum
- se_16 =
- _mm_add_epi16(se_16, _mm_max_epi16(setlo, _mm_subs_epi16(z, setlo)));
- se_16 =
- _mm_add_epi16(se_16, _mm_max_epi16(sethi, _mm_subs_epi16(z, sethi)));
- sev_16 = _mm_add_epi16(sev_16,
- _mm_max_epi16(sevtlo, _mm_subs_epi16(z, sevtlo)));
- sev_16 = _mm_add_epi16(sev_16,
- _mm_max_epi16(sevthi, _mm_subs_epi16(z, sevthi)));
- seh_16 = _mm_add_epi16(seh_16,
- _mm_max_epi16(sehtlo, _mm_subs_epi16(z, sehtlo)));
- seh_16 = _mm_add_epi16(seh_16,
- _mm_max_epi16(sehthi, _mm_subs_epi16(z, sehthi)));
- }
-
- // Add to 32 bit running sum as to not roll over.
- se_32 = _mm_add_epi32(se_32, _mm_add_epi32(_mm_unpackhi_epi16(se_16, z),
- _mm_unpacklo_epi16(se_16, z)));
- sev_32 =
- _mm_add_epi32(sev_32, _mm_add_epi32(_mm_unpackhi_epi16(sev_16, z),
- _mm_unpacklo_epi16(sev_16, z)));
- seh_32 =
- _mm_add_epi32(seh_32, _mm_add_epi32(_mm_unpackhi_epi16(seh_16, z),
- _mm_unpacklo_epi16(seh_16, z)));
- msa_32 =
- _mm_add_epi32(msa_32, _mm_add_epi32(_mm_unpackhi_epi16(msa_16, z),
- _mm_unpacklo_epi16(msa_16, z)));
-
- imgBuf += width_ * skip_num_;
- }
-
- __m128i se_128;
- __m128i sev_128;
- __m128i seh_128;
- __m128i msa_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128(&se_128, _mm_add_epi64(_mm_unpackhi_epi32(se_32, z),
- _mm_unpacklo_epi32(se_32, z)));
- _mm_store_si128(&sev_128, _mm_add_epi64(_mm_unpackhi_epi32(sev_32, z),
- _mm_unpacklo_epi32(sev_32, z)));
- _mm_store_si128(&seh_128, _mm_add_epi64(_mm_unpackhi_epi32(seh_32, z),
- _mm_unpacklo_epi32(seh_32, z)));
- _mm_store_si128(&msa_128, _mm_add_epi64(_mm_unpackhi_epi32(msa_32, z),
- _mm_unpacklo_epi32(msa_32, z)));
-
- uint64_t* se_64 = reinterpret_cast<uint64_t*>(&se_128);
- uint64_t* sev_64 = reinterpret_cast<uint64_t*>(&sev_128);
- uint64_t* seh_64 = reinterpret_cast<uint64_t*>(&seh_128);
- uint64_t* msa_64 = reinterpret_cast<uint64_t*>(&msa_128);
-
- const uint32_t spatialErrSum = se_64[0] + se_64[1];
- const uint32_t spatialErrVSum = sev_64[0] + sev_64[1];
- const uint32_t spatialErrHSum = seh_64[0] + seh_64[1];
- const uint32_t pixelMSA = msa_64[0] + msa_64[1];
-
- // Normalize over all pixels.
- const float spatialErr = static_cast<float>(spatialErrSum >> 2);
- const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
- const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
- const float norm = static_cast<float>(pixelMSA);
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
-
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
-
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
-
- return VPM_OK;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/deflickering.cc b/chromium/third_party/webrtc/modules/video_processing/deflickering.cc
deleted file mode 100644
index 0e936ce9b77..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/deflickering.cc
+++ /dev/null
@@ -1,402 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/deflickering.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/base/logging.h"
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/system_wrappers/include/sort.h"
-
-namespace webrtc {
-
-// Detection constants
-// (Q4) Maximum allowed deviation for detection.
-enum { kFrequencyDeviation = 39 };
-// (Q4) Minimum frequency that can be detected.
-enum { kMinFrequencyToDetect = 32 };
-// Number of flickers before we accept detection
-enum { kNumFlickerBeforeDetect = 2 };
-enum { kmean_valueScaling = 4 }; // (Q4) In power of 2
-// Dead-zone region in terms of pixel values
-enum { kZeroCrossingDeadzone = 10 };
-// Deflickering constants.
-// Compute the quantiles over 1 / DownsamplingFactor of the image.
-enum { kDownsamplingFactor = 8 };
-enum { kLog2OfDownsamplingFactor = 3 };
-
-// To generate in Matlab:
-// >> probUW16 = round(2^11 *
-// [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
-// >> fprintf('%d, ', probUW16)
-// Resolution reduced to avoid overflow when multiplying with the
-// (potentially) large number of pixels.
-const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {
- 102, 205, 410, 614, 819, 1024,
- 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
-
-// To generate in Matlab:
-// >> numQuants = 14; maxOnlyLength = 5;
-// >> weightUW16 = round(2^15 *
-// [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
-// >> fprintf('%d, %d,\n ', weightUW16);
-const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] = {
- 16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
-
-VPMDeflickering::VPMDeflickering() {
- Reset();
-}
-
-VPMDeflickering::~VPMDeflickering() {}
-
-void VPMDeflickering::Reset() {
- mean_buffer_length_ = 0;
- detection_state_ = 0;
- frame_rate_ = 0;
-
- memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
- memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
-
- // Initialize the history with a uniformly distributed histogram.
- quant_hist_uw8_[0][0] = 0;
- quant_hist_uw8_[0][kNumQuants - 1] = 255;
- for (int32_t i = 0; i < kNumProbs; i++) {
- // Unsigned round. <Q0>
- quant_hist_uw8_[0][i + 1] =
- static_cast<uint8_t>((prob_uw16_[i] * 255 + (1 << 10)) >> 11);
- }
-
- for (int32_t i = 1; i < kFrameHistory_size; i++) {
- memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0],
- sizeof(uint8_t) * kNumQuants);
- }
-}
-
-int32_t VPMDeflickering::ProcessFrame(VideoFrame* frame,
- VideoProcessing::FrameStats* stats) {
- assert(frame);
- uint32_t frame_memory;
- uint8_t quant_uw8[kNumQuants];
- uint8_t maxquant_uw8[kNumQuants];
- uint8_t minquant_uw8[kNumQuants];
- uint16_t target_quant_uw16[kNumQuants];
- uint16_t increment_uw16;
- uint8_t map_uw8[256];
-
- uint16_t tmp_uw16;
- uint32_t tmp_uw32;
- int width = frame->width();
- int height = frame->height();
-
- if (frame->IsZeroSize()) {
- return VPM_GENERAL_ERROR;
- }
-
- // Stricter height check due to subsampling size calculation below.
- if (height < 2) {
- LOG(LS_ERROR) << "Invalid frame size.";
- return VPM_GENERAL_ERROR;
- }
-
- if (!VideoProcessing::ValidFrameStats(*stats)) {
- return VPM_GENERAL_ERROR;
- }
-
- if (PreDetection(frame->timestamp(), *stats) == -1)
- return VPM_GENERAL_ERROR;
-
- // Flicker detection
- int32_t det_flicker = DetectFlicker();
- if (det_flicker < 0) {
- return VPM_GENERAL_ERROR;
- } else if (det_flicker != 1) {
- return 0;
- }
-
- // Size of luminance component.
- const uint32_t y_size = height * width;
-
- const uint32_t y_sub_size =
- width * (((height - 1) >> kLog2OfDownsamplingFactor) + 1);
- uint8_t* y_sorted = new uint8_t[y_sub_size];
- uint32_t sort_row_idx = 0;
- for (int i = 0; i < height; i += kDownsamplingFactor) {
- memcpy(y_sorted + sort_row_idx * width, frame->buffer(kYPlane) + i * width,
- width);
- sort_row_idx++;
- }
-
- webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8);
-
- uint32_t prob_idx_uw32 = 0;
- quant_uw8[0] = 0;
- quant_uw8[kNumQuants - 1] = 255;
-
- // Ensure we won't get an overflow below.
- // In practice, the number of subsampled pixels will not become this large.
- if (y_sub_size > (1 << 21) - 1) {
- LOG(LS_ERROR) << "Subsampled number of pixels too large.";
- return -1;
- }
-
- for (int32_t i = 0; i < kNumProbs; i++) {
- // <Q0>.
- prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11;
- quant_uw8[i + 1] = y_sorted[prob_idx_uw32];
- }
-
- delete[] y_sorted;
- y_sorted = NULL;
-
- // Shift history for new frame.
- memmove(quant_hist_uw8_[1], quant_hist_uw8_[0],
- (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t));
- // Store current frame in history.
- memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t));
-
- // We use a frame memory equal to the ceiling of half the frame rate to
- // ensure we capture an entire period of flicker.
- frame_memory = (frame_rate_ + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
- // frame_rate_ in Q4.
- if (frame_memory > kFrameHistory_size) {
- frame_memory = kFrameHistory_size;
- }
-
- // Get maximum and minimum.
- for (int32_t i = 0; i < kNumQuants; i++) {
- maxquant_uw8[i] = 0;
- minquant_uw8[i] = 255;
- for (uint32_t j = 0; j < frame_memory; j++) {
- if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) {
- maxquant_uw8[i] = quant_hist_uw8_[j][i];
- }
-
- if (quant_hist_uw8_[j][i] < minquant_uw8[i]) {
- minquant_uw8[i] = quant_hist_uw8_[j][i];
- }
- }
- }
-
- // Get target quantiles.
- for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) {
- // target = w * maxquant_uw8 + (1 - w) * minquant_uw8
- // Weights w = |weight_uw16_| are in Q15, hence the final output has to be
- // right shifted by 8 to end up in Q7.
- target_quant_uw16[i] = static_cast<uint16_t>(
- (weight_uw16_[i] * maxquant_uw8[i] +
- ((1 << 15) - weight_uw16_[i]) * minquant_uw8[i]) >>
- 8); // <Q7>
- }
-
- for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) {
- target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7;
- }
-
- // Compute the map from input to output pixels.
- uint16_t mapUW16; // <Q7>
- for (int32_t i = 1; i < kNumQuants; i++) {
- // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here.
- tmp_uw32 =
- static_cast<uint32_t>(target_quant_uw16[i] - target_quant_uw16[i - 1]);
- tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]); // <Q0>
-
- if (tmp_uw16 > 0) {
- increment_uw16 =
- static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32,
- tmp_uw16)); // <Q7>
- } else {
- // The value is irrelevant; the loop below will only iterate once.
- increment_uw16 = 0;
- }
-
- mapUW16 = target_quant_uw16[i - 1];
- for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) {
- // Unsigned round. <Q0>
- map_uw8[j] = (uint8_t)((mapUW16 + (1 << 6)) >> 7);
- mapUW16 += increment_uw16;
- }
- }
-
- // Map to the output frame.
- uint8_t* buffer = frame->buffer(kYPlane);
- for (uint32_t i = 0; i < y_size; i++) {
- buffer[i] = map_uw8[buffer[i]];
- }
-
- // Frame was altered, so reset stats.
- VideoProcessing::ClearFrameStats(stats);
-
- return VPM_OK;
-}
-
-/**
- Performs some pre-detection operations. Must be called before
- DetectFlicker().
-
- \param[in] timestamp Timestamp of the current frame.
- \param[in] stats Statistics of the current frame.
-
- \return 0: Success\n
- 2: Detection not possible due to flickering frequency too close to
- zero.\n
- -1: Error
-*/
-int32_t VPMDeflickering::PreDetection(
- const uint32_t timestamp,
- const VideoProcessing::FrameStats& stats) {
- int32_t mean_val; // Mean value of frame (Q4)
- uint32_t frame_rate = 0;
- int32_t meanBufferLength; // Temp variable.
-
- mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
- // Update mean value buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(mean_buffer_ + 1, mean_buffer_,
- (kMeanBufferLength - 1) * sizeof(int32_t));
- mean_buffer_[0] = mean_val;
-
- // Update timestamp buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(timestamp_buffer_ + 1, timestamp_buffer_,
- (kMeanBufferLength - 1) * sizeof(uint32_t));
- timestamp_buffer_[0] = timestamp;
-
- /* Compute current frame rate (Q4) */
- if (timestamp_buffer_[kMeanBufferLength - 1] != 0) {
- frame_rate = ((90000 << 4) * (kMeanBufferLength - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
-
- /* Determine required size of mean value buffer (mean_buffer_length_) */
- if (frame_rate == 0) {
- meanBufferLength = 1;
- } else {
- meanBufferLength =
- (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect;
- }
- /* Sanity check of buffer length */
- if (meanBufferLength >= kMeanBufferLength) {
- /* Too long buffer. The flickering frequency is too close to zero, which
- * makes the estimation unreliable.
- */
- mean_buffer_length_ = 0;
- return 2;
- }
- mean_buffer_length_ = meanBufferLength;
-
- if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) &&
- (mean_buffer_length_ != 1)) {
- frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
- frame_rate_ = frame_rate;
-
- return VPM_OK;
-}
-
-/**
- This function detects flicker in the video stream. As a side effect the
- mean value buffer is updated with the new mean value.
-
- \return 0: No flickering detected\n
- 1: Flickering detected\n
- 2: Detection not possible due to unreliable frequency interval
- -1: Error
-*/
-int32_t VPMDeflickering::DetectFlicker() {
- uint32_t i;
- int32_t freqEst; // (Q4) Frequency estimate to base detection upon
- int32_t ret_val = -1;
-
- /* Sanity check for mean_buffer_length_ */
- if (mean_buffer_length_ < 2) {
- /* Not possible to estimate frequency */
- return 2;
- }
- // Count zero crossings with a dead zone to be robust against noise. If the
- // noise std is 2 pixel this corresponds to about 95% confidence interval.
- int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling); // Q4
- int32_t meanOfBuffer = 0; // Mean value of mean value buffer.
- int32_t numZeros = 0; // Number of zeros that cross the dead-zone.
- int32_t cntState = 0; // State variable for zero crossing regions.
- int32_t cntStateOld = 0; // Previous state for zero crossing regions.
-
- for (i = 0; i < mean_buffer_length_; i++) {
- meanOfBuffer += mean_buffer_[i];
- }
- meanOfBuffer += (mean_buffer_length_ >> 1); // Rounding, not truncation.
- meanOfBuffer /= mean_buffer_length_;
-
- // Count zero crossings.
- cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone));
- cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone));
- for (i = 1; i < mean_buffer_length_; i++) {
- cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone));
- cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone));
- if (cntStateOld == 0) {
- cntStateOld = -cntState;
- }
- if (((cntState + cntStateOld) == 0) && (cntState != 0)) {
- numZeros++;
- cntStateOld = cntState;
- }
- }
- // END count zero crossings.
-
- /* Frequency estimation according to:
- * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_;
- *
- * Resolution is set to Q4
- */
- freqEst = ((numZeros * 90000) << 3);
- freqEst /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
-
- /* Translate frequency estimate to regions close to 100 and 120 Hz */
- uint8_t freqState = 0; // Current translation state;
- // (0) Not in interval,
- // (1) Within valid interval,
- // (2) Out of range
- int32_t freqAlias = freqEst;
- if (freqEst > kMinFrequencyToDetect) {
- uint8_t aliasState = 1;
- while (freqState == 0) {
- /* Increase frequency */
- freqAlias += (aliasState * frame_rate_);
- freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
- /* Compute state */
- freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
- freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
- freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
- /* Switch alias state */
- aliasState++;
- aliasState &= 0x01;
- }
- }
- /* Is frequency estimate within detection region? */
- if (freqState == 1) {
- ret_val = 1;
- } else if (freqState == 0) {
- ret_val = 2;
- } else {
- ret_val = 0;
- }
- return ret_val;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/deflickering.h b/chromium/third_party/webrtc/modules/video_processing/deflickering.h
deleted file mode 100644
index 3ff2723aba8..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/deflickering.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
-
-#include <string.h> // NULL
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMDeflickering {
- public:
- VPMDeflickering();
- ~VPMDeflickering();
-
- void Reset();
- int32_t ProcessFrame(VideoFrame* frame, VideoProcessing::FrameStats* stats);
-
- private:
- int32_t PreDetection(uint32_t timestamp,
- const VideoProcessing::FrameStats& stats);
-
- int32_t DetectFlicker();
-
- enum { kMeanBufferLength = 32 };
- enum { kFrameHistory_size = 15 };
- enum { kNumProbs = 12 };
- enum { kNumQuants = kNumProbs + 2 };
- enum { kMaxOnlyLength = 5 };
-
- uint32_t mean_buffer_length_;
- uint8_t detection_state_; // 0: No flickering
- // 1: Flickering detected
- // 2: In flickering
- int32_t mean_buffer_[kMeanBufferLength];
- uint32_t timestamp_buffer_[kMeanBufferLength];
- uint32_t frame_rate_;
- static const uint16_t prob_uw16_[kNumProbs];
- static const uint16_t weight_uw16_[kNumQuants - kMaxOnlyLength];
- uint8_t quant_hist_uw8_[kFrameHistory_size][kNumQuants];
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
index fd0d0efb97d..100cdb519ab 100644
--- a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
@@ -15,29 +15,22 @@
namespace webrtc {
VPMFramePreprocessor::VPMFramePreprocessor()
- : content_metrics_(nullptr),
- resampled_frame_(),
- enable_ca_(false),
- frame_cnt_(0) {
+ : resampled_frame_(), frame_cnt_(0) {
spatial_resampler_ = new VPMSimpleSpatialResampler();
- ca_ = new VPMContentAnalysis(true);
vd_ = new VPMVideoDecimator();
- EnableDenosing(false);
+ EnableDenoising(false);
+ denoised_frame_toggle_ = 0;
}
VPMFramePreprocessor::~VPMFramePreprocessor() {
Reset();
- delete ca_;
delete vd_;
delete spatial_resampler_;
}
void VPMFramePreprocessor::Reset() {
- ca_->Release();
vd_->Reset();
- content_metrics_ = nullptr;
spatial_resampler_->Reset();
- enable_ca_ = false;
frame_cnt_ = 0;
}
@@ -45,10 +38,6 @@ void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) {
vd_->EnableTemporalDecimation(enable);
}
-void VPMFramePreprocessor::EnableContentAnalysis(bool enable) {
- enable_ca_ = enable;
-}
-
void VPMFramePreprocessor::SetInputFrameResampleMode(
VideoFrameResampling resampling_mode) {
spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
@@ -70,15 +59,6 @@ int32_t VPMFramePreprocessor::SetTargetResolution(uint32_t width,
return VPM_OK;
}
-void VPMFramePreprocessor::SetTargetFramerate(int frame_rate) {
- if (frame_rate == -1) {
- vd_->EnableTemporalDecimation(false);
- } else {
- vd_->EnableTemporalDecimation(true);
- vd_->SetTargetFramerate(frame_rate);
- }
-}
-
void VPMFramePreprocessor::UpdateIncomingframe_rate() {
vd_->UpdateIncomingframe_rate();
}
@@ -95,7 +75,7 @@ uint32_t VPMFramePreprocessor::GetDecimatedHeight() const {
return spatial_resampler_->TargetHeight();
}
-void VPMFramePreprocessor::EnableDenosing(bool enable) {
+void VPMFramePreprocessor::EnableDenoising(bool enable) {
if (enable) {
denoiser_.reset(new VideoDenoiser(true));
} else {
@@ -116,9 +96,18 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
const VideoFrame* current_frame = &frame;
if (denoiser_) {
- denoiser_->DenoiseFrame(*current_frame, &denoised_frame_,
- &denoised_frame_prev_, 0);
- current_frame = &denoised_frame_;
+ VideoFrame* denoised_frame = &denoised_frame_[0];
+ VideoFrame* denoised_frame_prev = &denoised_frame_[1];
+ // Swap the buffer to save one memcpy in DenoiseFrame.
+ if (denoised_frame_toggle_) {
+ denoised_frame = &denoised_frame_[1];
+ denoised_frame_prev = &denoised_frame_[0];
+ }
+ // Invert the flag.
+ denoised_frame_toggle_ ^= 1;
+ denoiser_->DenoiseFrame(*current_frame, denoised_frame, denoised_frame_prev,
+ true);
+ current_frame = denoised_frame;
}
if (spatial_resampler_->ApplyResample(current_frame->width(),
@@ -130,18 +119,8 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
current_frame = &resampled_frame_;
}
- // Perform content analysis on the frame to be encoded.
- if (enable_ca_ && frame_cnt_ % kSkipFrameCA == 0) {
- // Compute new metrics every |kSkipFramesCA| frames, starting with
- // the first frame.
- content_metrics_ = ca_->ComputeContentMetrics(*current_frame);
- }
++frame_cnt_;
return current_frame;
}
-VideoContentMetrics* VPMFramePreprocessor::GetContentMetrics() const {
- return content_metrics_;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
index c35dd0d7aff..4ac6b76e830 100644
--- a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
@@ -14,7 +14,6 @@
#include <memory>
#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/content_analysis.h"
#include "webrtc/modules/video_processing/spatial_resampler.h"
#include "webrtc/modules/video_processing/video_decimator.h"
#include "webrtc/typedefs.h"
@@ -38,17 +37,11 @@ class VPMFramePreprocessor {
void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
- // Enable content analysis.
- void EnableContentAnalysis(bool enable);
-
// Set target resolution: frame rate and dimension.
int32_t SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate);
- // Set target frame rate.
- void SetTargetFramerate(int frame_rate);
-
// Update incoming frame rate/dimension.
void UpdateIncomingframe_rate();
@@ -60,24 +53,20 @@ class VPMFramePreprocessor {
uint32_t GetDecimatedHeight() const;
// Preprocess output:
- void EnableDenosing(bool enable);
+ void EnableDenoising(bool enable);
const VideoFrame* PreprocessFrame(const VideoFrame& frame);
- VideoContentMetrics* GetContentMetrics() const;
private:
// The content does not change so much every frame, so to reduce complexity
// we can compute new content metrics every |kSkipFrameCA| frames.
enum { kSkipFrameCA = 2 };
- VideoContentMetrics* content_metrics_;
- VideoFrame denoised_frame_;
- VideoFrame denoised_frame_prev_;
+ VideoFrame denoised_frame_[2];
VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
- VPMContentAnalysis* ca_;
VPMVideoDecimator* vd_;
std::unique_ptr<VideoDenoiser> denoiser_;
- bool enable_ca_;
+ uint8_t denoised_frame_toggle_;
uint32_t frame_cnt_;
};
diff --git a/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h b/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
index a8d63588876..e2069ddbe7a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
+++ b/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
@@ -28,46 +28,9 @@ namespace webrtc {
class VideoProcessing {
public:
- struct FrameStats {
- uint32_t hist[256]; // Frame histogram.
- uint32_t mean;
- uint32_t sum;
- uint32_t num_pixels;
- uint32_t sub_sampling_factor; // Sub-sampling factor, in powers of 2.
- };
-
- enum BrightnessWarning { kNoWarning, kDarkWarning, kBrightWarning };
-
static VideoProcessing* Create();
virtual ~VideoProcessing() {}
- // Retrieves statistics for the input frame. This function must be used to
- // prepare a FrameStats struct for use in certain VPM functions.
- static void GetFrameStats(const VideoFrame& frame, FrameStats* stats);
-
- // Checks the validity of a FrameStats struct. Currently, valid implies only
- // that is had changed from its initialized state.
- static bool ValidFrameStats(const FrameStats& stats);
-
- static void ClearFrameStats(FrameStats* stats);
-
- // Increases/decreases the luminance value. 'delta' can be in the range {}
- static void Brighten(int delta, VideoFrame* frame);
-
- // Detects and removes camera flicker from a video stream. Every frame from
- // the stream must be passed in. A frame will only be altered if flicker has
- // been detected. Has a fixed-point implementation.
- // Frame statistics provided by GetFrameStats(). On return the stats will
- // be reset to zero if the frame was altered. Call GetFrameStats() again
- // if the statistics for the altered frame are required.
- virtual int32_t Deflickering(VideoFrame* frame, FrameStats* stats) = 0;
-
- // Detects if a video frame is excessively bright or dark. Returns a
- // warning if this is the case. Multiple frames should be passed in before
- // expecting a warning. Has a floating-point implementation.
- virtual int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) = 0;
-
// The following functions refer to the pre-processor unit within VPM. The
// pre-processor perfoms spatial/temporal decimation and content analysis on
// the frames prior to encoding.
@@ -79,8 +42,6 @@ class VideoProcessing {
uint32_t height,
uint32_t frame_rate) = 0;
- virtual void SetTargetFramerate(int frame_rate) = 0;
-
virtual uint32_t GetDecimatedFrameRate() = 0;
virtual uint32_t GetDecimatedWidth() const = 0;
virtual uint32_t GetDecimatedHeight() const = 0;
@@ -90,11 +51,8 @@ class VideoProcessing {
virtual void SetInputFrameResampleMode(
VideoFrameResampling resampling_mode) = 0;
- virtual void EnableDenosing(bool enable) = 0;
+ virtual void EnableDenoising(bool enable) = 0;
virtual const VideoFrame* PreprocessFrame(const VideoFrame& frame) = 0;
-
- virtual VideoContentMetrics* GetContentMetrics() const = 0;
- virtual void EnableContentAnalysis(bool enable) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc
deleted file mode 100644
index abce518e584..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-#define MAYBE_BrightnessDetection DISABLED_BrightnessDetection
-#else
-#define MAYBE_BrightnessDetection BrightnessDetection
-#endif
-TEST_F(VideoProcessingTest, MAYBE_BrightnessDetection) {
- uint32_t frameNum = 0;
- int32_t brightnessWarning = 0;
- uint32_t warningCount = 0;
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- if (brightnessWarning != VideoProcessing::kNoWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect few warnings
- float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("\nWarning proportions:\n");
- printf("Stock foreman: %.1f %%\n", warningProportion);
- EXPECT_LT(warningProportion, 10);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ &&
- frameNum < 300) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* frame = video_frame_.buffer(kYPlane);
- uint32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
- yTmp = frame[yIdx] << 1;
- if (yTmp > 255) {
- yTmp = 255;
- }
- frame[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- EXPECT_NE(VideoProcessing::kDarkWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessing::kBrightWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many brightness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Bright foreman: %.1f %%\n", warningProportion);
- EXPECT_GT(warningProportion, 95);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ &&
- frameNum < 300) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* y_plane = video_frame_.buffer(kYPlane);
- int32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
- yTmp = y_plane[yIdx] >> 1;
- y_plane[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- EXPECT_NE(VideoProcessing::kBrightWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessing::kDarkWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many darkness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Dark foreman: %.1f %%\n\n", warningProportion);
- EXPECT_GT(warningProportion, 90);
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc
deleted file mode 100644
index 80bb56489bb..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/content_analysis.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_ContentAnalysis) {
-#else
-TEST_F(VideoProcessingTest, ContentAnalysis) {
-#endif
- VPMContentAnalysis ca__c(false);
- VPMContentAnalysis ca__sse(true);
- VideoContentMetrics* _cM_c;
- VideoContentMetrics* _cM_SSE;
-
- ca__c.Initialize(width_, height_);
- ca__sse.Initialize(width_, height_);
-
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- // Using ConvertToI420 to add stride to the image.
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- _cM_c = ca__c.ComputeContentMetrics(video_frame_);
- _cM_SSE = ca__sse.ComputeContentMetrics(video_frame_);
-
- ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
- ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
- ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h);
- ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc
deleted file mode 100644
index 5ff5692cce9..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_Deflickering) {
-#else
-TEST_F(VideoProcessingTest, Deflickering) {
-#endif
- enum { NumRuns = 30 };
- uint32_t frameNum = 0;
- const uint32_t frame_rate = 15;
-
- int64_t min_runtime = 0;
- int64_t avg_runtime = 0;
-
- // Close automatically opened Foreman.
- fclose(source_file_);
- const std::string input_file =
- webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
- source_file_ = fopen(input_file.c_str(), "rb");
- ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file: " << input_file
- << "\n";
-
- const std::string output_file =
- webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
- FILE* deflickerFile = fopen(output_file.c_str(), "wb");
- ASSERT_TRUE(deflickerFile != NULL)
- << "Could not open output file: " << output_file << "\n";
-
- printf("\nRun time [us / frame]:\n");
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
- TickTime t0;
- TickTime t1;
- TickInterval acc_ticks;
- uint32_t timeStamp = 1;
-
- frameNum = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- frameNum++;
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- video_frame_.set_timestamp(timeStamp);
-
- t0 = TickTime::Now();
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
- t1 = TickTime::Now();
- acc_ticks += (t1 - t0);
-
- if (run_idx == 0) {
- if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
- return;
- }
- }
- timeStamp += (90000 / frame_rate);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- printf("%u\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
- if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) {
- min_runtime = acc_ticks.Microseconds();
- }
- avg_runtime += acc_ticks.Microseconds();
-
- rewind(source_file_);
- }
- ASSERT_EQ(0, fclose(deflickerFile));
- // TODO(kjellander): Add verification of deflicker output file.
-
- printf("\nAverage run time = %d us / frame\n",
- static_cast<int>(avg_runtime / frameNum / NumRuns));
- printf("Min run time = %d us / frame\n\n",
- static_cast<int>(min_runtime / frameNum));
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
index a45f933bb54..4c13a05d635 100644
--- a/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -31,18 +31,10 @@ TEST_F(VideoProcessingTest, CopyMem) {
}
}
- memset(dst, 0, 8 * 8);
- df_c->CopyMem8x8(src, 8, dst, 8);
- EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
-
memset(dst, 0, 16 * 16);
df_c->CopyMem16x16(src, 16, dst, 16);
EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
- memset(dst, 0, 8 * 8);
- df_sse_neon->CopyMem16x16(src, 8, dst, 8);
- EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
-
memset(dst, 0, 16 * 16);
df_sse_neon->CopyMem16x16(src, 16, dst, 16);
EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
@@ -87,10 +79,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| >= |4 + shift_inc1|
@@ -101,10 +92,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| >= 8
@@ -115,10 +105,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| > 15
@@ -130,22 +119,23 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
memset(dst, 0, 16 * 16);
DenoiserDecision decision =
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
EXPECT_EQ(COPY_BLOCK, decision);
- decision =
- df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ decision = df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
EXPECT_EQ(COPY_BLOCK, decision);
}
TEST_F(VideoProcessingTest, Denoiser) {
+ // Used in swap buffer.
+ int denoised_frame_toggle = 0;
// Create pure C denoiser.
VideoDenoiser denoiser_c(false);
// Create SSE or NEON denoiser.
VideoDenoiser denoiser_sse_neon(true);
VideoFrame denoised_frame_c;
- VideoFrame denoised_frame_track_c;
+ VideoFrame denoised_frame_prev_c;
VideoFrame denoised_frame_sse_neon;
- VideoFrame denoised_frame_track_sse_neon;
+ VideoFrame denoised_frame_prev_sse_neon;
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
@@ -154,13 +144,25 @@ TEST_F(VideoProcessingTest, Denoiser) {
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
- denoiser_c.DenoiseFrame(video_frame_, &denoised_frame_c,
- &denoised_frame_track_c, -1);
- denoiser_sse_neon.DenoiseFrame(video_frame_, &denoised_frame_sse_neon,
- &denoised_frame_track_sse_neon, -1);
-
+ VideoFrame* p_denoised_c = &denoised_frame_c;
+ VideoFrame* p_denoised_prev_c = &denoised_frame_prev_c;
+ VideoFrame* p_denoised_sse_neon = &denoised_frame_sse_neon;
+ VideoFrame* p_denoised_prev_sse_neon = &denoised_frame_prev_sse_neon;
+ // Swap the buffer to save one memcpy in DenoiseFrame.
+ if (denoised_frame_toggle) {
+ p_denoised_c = &denoised_frame_prev_c;
+ p_denoised_prev_c = &denoised_frame_c;
+ p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
+ p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
+ }
+ denoiser_c.DenoiseFrame(video_frame_, p_denoised_c, p_denoised_prev_c,
+ false);
+ denoiser_sse_neon.DenoiseFrame(video_frame_, p_denoised_sse_neon,
+ p_denoised_prev_sse_neon, false);
+ // Invert the flag.
+ denoised_frame_toggle ^= 1;
// Denoising results should be the same for C and SSE/NEON denoiser.
- ASSERT_TRUE(test::FramesEqual(denoised_frame_c, denoised_frame_sse_neon));
+ ASSERT_TRUE(test::FramesEqual(*p_denoised_c, *p_denoised_sse_neon));
}
ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc b/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 0d18d0a4c89..9e61b51884f 100644
--- a/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -15,8 +15,8 @@
#include <memory>
#include <string>
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -51,8 +51,6 @@ static void TestSize(const VideoFrame& source_frame,
int target_height,
double expected_psnr,
VideoProcessing* vpm);
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2);
static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed);
@@ -73,9 +71,12 @@ void VideoProcessingTest::SetUp() {
video_frame_.CreateEmptyFrame(width_, height_, width_,
half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
- memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
- memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
- memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
+ video_frame_.allocated_size(kYPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
+ video_frame_.allocated_size(kUPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
+ video_frame_.allocated_size(kVPlane));
const std::string video_file =
webrtc::test::ResourcePath("foreman_cif", "yuv");
source_file_ = fopen(video_file.c_str(), "rb");
@@ -93,108 +94,6 @@ void VideoProcessingTest::TearDown() {
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) {
-#else
-TEST_F(VideoProcessingTest, HandleNullBuffer) {
-#endif
- // TODO(mikhal/stefan): Do we need this one?
- VideoProcessing::FrameStats stats;
- // Video frame with unallocated buffer.
- VideoFrame videoFrame;
-
- vp_->GetFrameStats(videoFrame, &stats);
- EXPECT_EQ(stats.num_pixels, 0u);
-
- EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats));
-
- EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) {
-#else
-TEST_F(VideoProcessingTest, HandleBadStats) {
-#endif
- VideoProcessing::FrameStats stats;
- vp_->ClearFrameStats(&stats);
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats));
-
- EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) {
-#else
-TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
-#endif
- VideoFrame video_frame2;
- VideoProcessing::FrameStats stats;
- // Only testing non-static functions here.
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- video_frame2.CopyFrame(video_frame_);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
-
- // Retrieve frame stats again in case Deflickering() has zeroed them.
- vp_->GetFrameStats(video_frame2, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame2, &stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- video_frame2.CopyFrame(video_frame_);
- ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats));
-
- ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_FrameStats) {
-#else
-TEST_F(VideoProcessingTest, FrameStats) {
-#endif
- VideoProcessing::FrameStats stats;
- vp_->ClearFrameStats(&stats);
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_FALSE(vp_->ValidFrameStats(stats));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- EXPECT_TRUE(vp_->ValidFrameStats(stats));
-
- printf("\nFrameStats\n");
- printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n",
- static_cast<unsigned int>(stats.mean),
- static_cast<unsigned int>(stats.num_pixels),
- static_cast<unsigned int>(stats.sub_sampling_factor),
- static_cast<unsigned int>(stats.sum));
-
- vp_->ClearFrameStats(&stats);
- EXPECT_FALSE(vp_->ValidFrameStats(stats));
-}
-
-#if defined(WEBRTC_IOS)
TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) {
#else
TEST_F(VideoProcessingTest, PreprocessorLogic) {
@@ -230,8 +129,6 @@ TEST_F(VideoProcessingTest, Resampler) {
rewind(source_file_);
ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n";
- // CA not needed here
- vp_->EnableContentAnalysis(false);
// no temporal decimation
vp_->EnableTemporalDecimation(false);
@@ -248,11 +145,12 @@ TEST_F(VideoProcessingTest, Resampler) {
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer.
- const TickTime time_start = TickTime::Now();
+ const int64_t time_start = rtc::TimeNanos();
// Init the sourceFrame with a timestamp.
- video_frame_.set_render_time_ms(time_start.MillisecondTimestamp());
- video_frame_.set_timestamp(time_start.MillisecondTimestamp() * 90);
+ int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
+ video_frame_.set_render_time_ms(time_start_ms);
+ video_frame_.set_timestamp(time_start_ms * 90);
// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling:
@@ -295,7 +193,8 @@ TEST_F(VideoProcessingTest, Resampler) {
TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
// Stop timer.
- const int64_t runtime = (TickTime::Now() - time_start).Microseconds();
+ const int64_t runtime =
+ (rtc::TimeNanos() - time_start) / rtc::kNumNanosecsPerMicrosec;
if (runtime < min_runtime || run_idx == 0) {
min_runtime = runtime;
}
@@ -378,22 +277,6 @@ void TestSize(const VideoFrame& source_frame,
target_height);
}
-bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2) {
- for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
- webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
- int allocated_size1 = frame1.allocated_size(plane_type);
- int allocated_size2 = frame2.allocated_size(plane_type);
- if (allocated_size1 != allocated_size2)
- return false;
- const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
- const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
- if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
- return false;
- }
- return true;
-}
-
void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed) {
// Skip if writing to files is not enabled.
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
index b111a0e4123..376dec74a35 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
@@ -45,14 +45,6 @@ std::unique_ptr<DenoiserFilter> DenoiserFilter::Create(
filter.reset(new DenoiserFilterNEON());
if (cpu_type != nullptr)
*cpu_type = CPU_NEON;
-#elif defined(WEBRTC_DETECT_NEON)
- if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
- filter.reset(new DenoiserFilterNEON());
- if (cpu_type != nullptr)
- *cpu_type = CPU_NEON;
- } else {
- filter.reset(new DenoiserFilterC());
- }
#else
filter.reset(new DenoiserFilterC());
#endif
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
index f2c7570083d..1254a88d3c9 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
@@ -25,12 +25,6 @@ extern const int kSumDiffThresholdHigh;
enum DenoiserDecision { COPY_BLOCK, FILTER_BLOCK };
enum CpuType { CPU_NEON, CPU_NOT_NEON };
-struct DenoiseMetrics {
- uint32_t var;
- uint32_t sad;
- uint8_t denoise;
- bool is_skin;
-};
class DenoiserFilter {
public:
@@ -43,10 +37,6 @@ class DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) = 0;
- virtual void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) = 0;
virtual uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -59,8 +49,7 @@ class DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) = 0;
+ int increase_denoising) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
index 8c84f4989c2..1b3c0b70987 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
@@ -25,17 +25,6 @@ void DenoiserFilterC::CopyMem16x16(const uint8_t* src,
}
}
-void DenoiserFilterC::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- for (int i = 0; i < 8; i++) {
- memcpy(dst, src, 8);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -66,8 +55,7 @@ DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
int sum_diff_thresh = 0;
int sum_diff = 0;
int adj_val[3] = {3, 4, 6};
@@ -137,60 +125,10 @@ DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y,
sum_diff += col_sum[c];
}
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (abs(sum_diff) > sum_diff_thresh) {
- int delta = ((abs(sum_diff) - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_avg_y_stride * 16;
- running_avg_y -= avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- for (int c = 0; c < 16; ++c) {
- int diff = mc_running_avg_y[c] - sig[c];
- int adjustment = abs(diff);
- if (adjustment > delta)
- adjustment = delta;
- if (diff > 0) {
- // Bring denoised signal down.
- if (running_avg_y[c] - adjustment < 0)
- running_avg_y[c] = 0;
- else
- running_avg_y[c] = running_avg_y[c] - adjustment;
- col_sum[c] -= adjustment;
- } else if (diff < 0) {
- // Bring denoised signal up.
- if (running_avg_y[c] + adjustment > 255)
- running_avg_y[c] = 255;
- else
- running_avg_y[c] = running_avg_y[c] + adjustment;
- col_sum[c] += adjustment;
- }
- }
- sig += sig_stride;
- mc_running_avg_y += mc_avg_y_stride;
- running_avg_y += avg_y_stride;
- }
-
- sum_diff = 0;
- for (int c = 0; c < 16; ++c) {
- if (col_sum[c] >= 128) {
- col_sum[c] = 127;
- }
- sum_diff += col_sum[c];
- }
-
- if (abs(sum_diff) > sum_diff_thresh)
- return COPY_BLOCK;
- } else {
- return COPY_BLOCK;
- }
- }
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold;
+ if (abs(sum_diff) > sum_diff_thresh)
+ return COPY_BLOCK;
return FILTER_BLOCK;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
index 3e52c3e47c9..d8b6c5eb797 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
@@ -22,10 +22,6 @@ class DenoiserFilterC : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterC : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
index 2920305f71b..68c94cbdb7a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
@@ -14,6 +14,8 @@
namespace webrtc {
+const int kSumDiffThresholdHighNeon = 600;
+
static int HorizontalAddS16x8(const int16x8_t v_16x8) {
const int32x4_t a = vpaddlq_s16(v_16x8);
const int64x2_t b = vpaddlq_s32(a);
@@ -75,20 +77,6 @@ void DenoiserFilterNEON::CopyMem16x16(const uint8_t* src,
}
}
-void DenoiserFilterNEON::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- uint8x8_t vtmp;
-
- for (int r = 0; r < 8; r++) {
- vtmp = vld1_u8(src);
- vst1_u8(dst, vtmp);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterNEON::Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -106,8 +94,7 @@ DenoiserDecision DenoiserFilterNEON::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
// If motion_magnitude is small, making the denoiser more aggressive by
// increasing the adjustment for each level, level1 adjustment is
// increased, the deltas stay the same.
@@ -190,92 +177,13 @@ DenoiserDecision DenoiserFilterNEON::MbDenoise(uint8_t* mc_running_avg_y,
}
// Too much adjustments => copy block.
- {
- int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total),
- vget_low_s64(v_sum_diff_total));
- int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (sum_diff > sum_diff_thresh) {
- // Before returning to copy the block (i.e., apply no denoising),
- // checK if we can still apply some (weaker) temporal filtering to
- // this block, that would otherwise not be denoised at all. Simplest
- // is to apply an additional adjustment to running_avg_y to bring it
- // closer to sig. The adjustment is capped by a maximum delta, and
- // chosen such that in most cases the resulting sum_diff will be
- // within the accceptable range given by sum_diff_thresh.
-
- // The delta is set by the excess of absolute pixel diff over the
- // threshold.
- int delta = ((sum_diff - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- const uint8x16_t k_delta = vmovq_n_u8(delta);
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_running_avg_y_stride * 16;
- running_avg_y -= running_avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- uint8x16_t v_running_avg_y = vld1q_u8(running_avg_y);
- const uint8x16_t v_sig = vld1q_u8(sig);
- const uint8x16_t v_mc_running_avg_y = vld1q_u8(mc_running_avg_y);
-
- // Calculate absolute difference and sign masks.
- const uint8x16_t v_abs_diff = vabdq_u8(v_sig, v_mc_running_avg_y);
- const uint8x16_t v_diff_pos_mask =
- vcltq_u8(v_sig, v_mc_running_avg_y);
- const uint8x16_t v_diff_neg_mask =
- vcgtq_u8(v_sig, v_mc_running_avg_y);
- // Clamp absolute difference to delta to get the adjustment.
- const uint8x16_t v_abs_adjustment = vminq_u8(v_abs_diff, (k_delta));
-
- const uint8x16_t v_pos_adjustment =
- vandq_u8(v_diff_pos_mask, v_abs_adjustment);
- const uint8x16_t v_neg_adjustment =
- vandq_u8(v_diff_neg_mask, v_abs_adjustment);
-
- v_running_avg_y = vqsubq_u8(v_running_avg_y, v_pos_adjustment);
- v_running_avg_y = vqaddq_u8(v_running_avg_y, v_neg_adjustment);
-
- // Store results.
- vst1q_u8(running_avg_y, v_running_avg_y);
-
- {
- const int8x16_t v_sum_diff =
- vqsubq_s8(vreinterpretq_s8_u8(v_neg_adjustment),
- vreinterpretq_s8_u8(v_pos_adjustment));
-
- const int16x8_t fe_dc_ba_98_76_54_32_10 = vpaddlq_s8(v_sum_diff);
- const int32x4_t fedc_ba98_7654_3210 =
- vpaddlq_s16(fe_dc_ba_98_76_54_32_10);
- const int64x2_t fedcba98_76543210 =
- vpaddlq_s32(fedc_ba98_7654_3210);
-
- v_sum_diff_total = vqaddq_s64(v_sum_diff_total, fedcba98_76543210);
- }
- // Update pointers for next iteration.
- sig += sig_stride;
- mc_running_avg_y += mc_running_avg_y_stride;
- running_avg_y += running_avg_y_stride;
- }
- {
- // Update the sum of all pixel differences of this MB.
- x = vqadd_s64(vget_high_s64(v_sum_diff_total),
- vget_low_s64(v_sum_diff_total));
- sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
-
- if (sum_diff > sum_diff_thresh) {
- return COPY_BLOCK;
- }
- }
- } else {
- return COPY_BLOCK;
- }
- }
- }
+ int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total),
+ vget_low_s64(v_sum_diff_total));
+ int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHighNeon : kSumDiffThreshold;
+ if (sum_diff > sum_diff_thresh)
+ return COPY_BLOCK;
// Tell above level that block was filtered.
running_avg_y -= running_avg_y_stride * 16;
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
index 2e3ea268290..55850bd1ea5 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
@@ -22,10 +22,6 @@ class DenoiserFilterNEON : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterNEON : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
index 614b6c94859..0545a97398e 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
@@ -9,7 +9,6 @@
*/
#include <emmintrin.h>
-
#include "webrtc/modules/video_processing/util/denoiser_filter_sse2.h"
namespace webrtc {
@@ -110,18 +109,6 @@ void DenoiserFilterSSE2::CopyMem16x16(const uint8_t* src,
}
}
-// TODO(jackychen): Optimize this function using SSE2.
-void DenoiserFilterSSE2::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- for (int i = 0; i < 8; i++) {
- memcpy(dst, src, 8);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src,
int src_stride,
const uint8_t* ref,
@@ -139,8 +126,8 @@ DenoiserDecision DenoiserFilterSSE2::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
+ DenoiserDecision decision = FILTER_BLOCK;
unsigned int sum_diff_thresh = 0;
int shift_inc =
(increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 1
@@ -210,76 +197,13 @@ DenoiserDecision DenoiserFilterSSE2::MbDenoise(uint8_t* mc_running_avg_y,
running_avg_y += avg_y_stride;
}
- {
- // Compute the sum of all pixel differences of this MB.
- unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff);
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (abs_sum_diff > sum_diff_thresh) {
- // Before returning to copy the block (i.e., apply no denoising),
- // check if we can still apply some (weaker) temporal filtering to
- // this block, that would otherwise not be denoised at all. Simplest
- // is to apply an additional adjustment to running_avg_y to bring it
- // closer to sig. The adjustment is capped by a maximum delta, and
- // chosen such that in most cases the resulting sum_diff will be
- // within the acceptable range given by sum_diff_thresh.
-
- // The delta is set by the excess of absolute pixel diff over the
- // threshold.
- int delta = ((abs_sum_diff - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- const __m128i k_delta = _mm_set1_epi8(delta);
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_avg_y_stride * 16;
- running_avg_y -= avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- __m128i v_running_avg_y =
- _mm_loadu_si128(reinterpret_cast<__m128i*>(&running_avg_y[0]));
- // Calculate differences.
- const __m128i v_sig =
- _mm_loadu_si128(reinterpret_cast<const __m128i*>(&sig[0]));
- const __m128i v_mc_running_avg_y =
- _mm_loadu_si128(reinterpret_cast<__m128i*>(&mc_running_avg_y[0]));
- const __m128i pdiff = _mm_subs_epu8(v_mc_running_avg_y, v_sig);
- const __m128i ndiff = _mm_subs_epu8(v_sig, v_mc_running_avg_y);
- // Obtain the sign. FF if diff is negative.
- const __m128i diff_sign = _mm_cmpeq_epi8(pdiff, k_0);
- // Clamp absolute difference to delta to get the adjustment.
- const __m128i adj = _mm_min_epu8(_mm_or_si128(pdiff, ndiff), k_delta);
- // Restore the sign and get positive and negative adjustments.
- __m128i padj, nadj;
- padj = _mm_andnot_si128(diff_sign, adj);
- nadj = _mm_and_si128(diff_sign, adj);
- // Calculate filtered value.
- v_running_avg_y = _mm_subs_epu8(v_running_avg_y, padj);
- v_running_avg_y = _mm_adds_epu8(v_running_avg_y, nadj);
- _mm_storeu_si128(reinterpret_cast<__m128i*>(running_avg_y),
- v_running_avg_y);
-
- // Accumulate the adjustments.
- acc_diff = _mm_subs_epi8(acc_diff, padj);
- acc_diff = _mm_adds_epi8(acc_diff, nadj);
-
- // Update pointers for next iteration.
- sig += sig_stride;
- mc_running_avg_y += mc_avg_y_stride;
- running_avg_y += avg_y_stride;
- }
- abs_sum_diff = AbsSumDiff16x1(acc_diff);
- if (abs_sum_diff > sum_diff_thresh) {
- return COPY_BLOCK;
- }
- } else {
- return COPY_BLOCK;
- }
- }
- }
- return FILTER_BLOCK;
+ // Compute the sum of all pixel differences of this MB.
+ unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff);
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold;
+ if (abs_sum_diff > sum_diff_thresh)
+ decision = COPY_BLOCK;
+ return decision;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
index 395fa10eca0..731344c809c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
@@ -22,10 +22,6 @@ class DenoiserFilterSSE2 : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterSSE2 : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
index 87beac38ae5..3b0d59ef717 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
@@ -9,6 +9,9 @@
*/
#include "webrtc/modules/video_processing/util/noise_estimation.h"
+#if DISPLAYNEON
+#include <android/log.h>
+#endif
namespace webrtc {
@@ -27,10 +30,10 @@ void NoiseEstimation::GetNoise(int mb_index, uint32_t var, uint32_t luma) {
consec_low_var_[mb_index]++;
num_static_block_++;
if (consec_low_var_[mb_index] >= kConsecLowVarFrame &&
- (luma >> 8) < kAverageLumaMax && (luma >> 8) > kAverageLumaMin) {
+ (luma >> 6) < kAverageLumaMax && (luma >> 6) > kAverageLumaMin) {
// Normalized var by the average luma value, this gives more weight to
// darker blocks.
- int nor_var = var / (luma >> 12);
+ int nor_var = var / (luma >> 10);
noise_var_ +=
nor_var > kBlockSelectionVarMax ? kBlockSelectionVarMax : nor_var;
num_noisy_block_++;
@@ -46,25 +49,36 @@ void NoiseEstimation::UpdateNoiseLevel() {
// condition more reasonable.
// No enough samples implies the motion of the camera or too many moving
// objects in the frame.
- if (num_static_block_ < (0.65 * mb_cols_ * mb_rows_) || !num_noisy_block_) {
+ if (num_static_block_ <
+ (0.65 * mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL) ||
+ !num_noisy_block_) {
+#if DISPLAY
+ printf("Not enough samples. %d \n", num_static_block_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY",
+ "Not enough samples. %d \n", num_static_block_);
+#endif
noise_var_ = 0;
noise_var_accum_ = 0;
- num_static_block_ = 0;
num_noisy_block_ = 0;
-#if DISPLAY
- printf("Not enough samples.\n");
-#endif
+ num_static_block_ = 0;
return;
} else {
- // Normalized by the number of noisy blocks.
- noise_var_ /= num_noisy_block_;
- // Get the percentage of static blocks.
- percent_static_block_ =
- static_cast<double>(num_static_block_) / (mb_cols_ * mb_rows_);
#if DISPLAY
- printf("%d %d fraction = %.3f\n", num_static_block_, mb_cols_ * mb_rows_,
+ printf("%d %d fraction = %.3f\n", num_static_block_,
+ mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL,
percent_static_block_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY", "%d %d fraction = %.3f\n",
+ num_static_block_,
+ mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL,
+ percent_static_block_);
#endif
+ // Normalized by the number of noisy blocks.
+ noise_var_ /= num_noisy_block_;
+ // Get the percentage of static blocks.
+ percent_static_block_ = static_cast<double>(num_static_block_) /
+ (mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL);
num_noisy_block_ = 0;
num_static_block_ = 0;
}
@@ -75,12 +89,16 @@ void NoiseEstimation::UpdateNoiseLevel() {
} else {
noise_var_accum_ = (noise_var_accum_ * 15 + noise_var_) / 16;
}
- // Reset noise_var_ for the next frame.
- noise_var_ = 0;
#if DISPLAY
printf("noise_var_accum_ = %.1f, noise_var_ = %d.\n", noise_var_accum_,
noise_var_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY",
+ "noise_var_accum_ = %.1f, noise_var_ = %d.\n",
+ noise_var_accum_, noise_var_);
#endif
+ // Reset noise_var_ for the next frame.
+ noise_var_ = 0;
}
uint8_t NoiseEstimation::GetNoiseLevel() {
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
index ca5cc2324fb..294bfb3a731 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
@@ -11,28 +11,36 @@
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_processing/include/video_processing_defines.h"
#include "webrtc/modules/video_processing/util/denoiser_filter.h"
namespace webrtc {
-#define EXPERIMENTAL 0
-#define DISPLAY 0
+#define DISPLAY 0 // Rectangle diagnostics
+#define DISPLAYNEON 0 // Rectangle diagnostics on NEON
-const int kNoiseThreshold = 200;
+const int kNoiseThreshold = 150;
const int kNoiseThresholdNeon = 70;
const int kConsecLowVarFrame = 6;
const int kAverageLumaMin = 20;
const int kAverageLumaMax = 220;
const int kBlockSelectionVarMax = kNoiseThreshold << 1;
+// TODO(jackychen): To test different sampling strategy.
+// Collect noise data every NOISE_SUBSAMPLE_INTERVAL blocks.
+#define NOISE_SUBSAMPLE_INTERVAL 41
+
class NoiseEstimation {
public:
void Init(int width, int height, CpuType cpu_type);
+ // Collect noise data from one qualified block.
void GetNoise(int mb_index, uint32_t var, uint32_t luma);
+ // Reset the counter for consecutive low-var blocks.
void ResetConsecLowVar(int mb_index);
+ // Update noise level for current frame.
void UpdateNoiseLevel();
// 0: low noise, 1: high noise
uint8_t GetNoiseLevel();
@@ -42,13 +50,13 @@ class NoiseEstimation {
int height_;
int mb_rows_;
int mb_cols_;
+ int num_noisy_block_;
+ int num_static_block_;
CpuType cpu_type_;
uint32_t noise_var_;
double noise_var_accum_;
- int num_noisy_block_;
- int num_static_block_;
double percent_static_block_;
- rtc::scoped_ptr<uint32_t[]> consec_low_var_;
+ std::unique_ptr<uint32_t[]> consec_low_var_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc b/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
index 63e347b026e..c6623fa836c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
@@ -9,9 +9,9 @@
*/
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/video_decimator.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#define VD_MIN(a, b) ((a) < (b)) ? (a) : (b)
@@ -95,7 +95,7 @@ bool VPMVideoDecimator::DropFrame() {
}
uint32_t VPMVideoDecimator::GetDecimatedFrameRate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ ProcessIncomingframe_rate(rtc::TimeMillis());
if (!enable_temporal_decimation_) {
return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
}
@@ -104,12 +104,12 @@ uint32_t VPMVideoDecimator::GetDecimatedFrameRate() {
}
uint32_t VPMVideoDecimator::Inputframe_rate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ ProcessIncomingframe_rate(rtc::TimeMillis());
return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
}
void VPMVideoDecimator::UpdateIncomingframe_rate() {
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
if (incoming_frame_times_[0] == 0) {
// First no shift.
} else {
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
index b00da5c90a1..f116f882cd6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
@@ -7,305 +7,347 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/video_denoiser.h"
namespace webrtc {
+#if DISPLAY || DISPLAYNEON
+static void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 8; i++) {
+ memcpy(dst, src, 8);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+static void ShowRect(const std::unique_ptr<DenoiserFilter>& filter,
+ const std::unique_ptr<uint8_t[]>& d_status,
+ const std::unique_ptr<uint8_t[]>& moving_edge_red,
+ const std::unique_ptr<uint8_t[]>& x_density,
+ const std::unique_ptr<uint8_t[]>& y_density,
+ const uint8_t* u_src,
+ const uint8_t* v_src,
+ uint8_t* u_dst,
+ uint8_t* v_dst,
+ int mb_rows_,
+ int mb_cols_,
+ int stride_u_,
+ int stride_v_) {
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ int mb_index = mb_row * mb_cols_ + mb_col;
+ const uint8_t* mb_src_u =
+ u_src + (mb_row << 3) * stride_u_ + (mb_col << 3);
+ const uint8_t* mb_src_v =
+ v_src + (mb_row << 3) * stride_v_ + (mb_col << 3);
+ uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u_ + (mb_col << 3);
+ uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v_ + (mb_col << 3);
+ uint8_t uv_tmp[8 * 8];
+ memset(uv_tmp, 200, 8 * 8);
+ if (d_status[mb_index] == 1) {
+ // Paint to red.
+ CopyMem8x8(mb_src_u, stride_u_, mb_dst_u, stride_u_);
+ CopyMem8x8(uv_tmp, 8, mb_dst_v, stride_v_);
+ } else if (moving_edge_red[mb_row * mb_cols_ + mb_col] &&
+ x_density[mb_col] * y_density[mb_row]) {
+ // Paint to blue.
+ CopyMem8x8(uv_tmp, 8, mb_dst_u, stride_u_);
+ CopyMem8x8(mb_src_v, stride_v_, mb_dst_v, stride_v_);
+ } else {
+ CopyMem8x8(mb_src_u, stride_u_, mb_dst_u, stride_u_);
+ CopyMem8x8(mb_src_v, stride_v_, mb_dst_v, stride_v_);
+ }
+ }
+ }
+}
+#endif
+
VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection)
: width_(0),
height_(0),
filter_(DenoiserFilter::Create(runtime_cpu_detection, &cpu_type_)),
ne_(new NoiseEstimation()) {}
-#if EXPERIMENTAL
-// Check the mb position(1: close to the center, 3: close to the border).
-static int PositionCheck(int mb_row, int mb_col, int mb_rows, int mb_cols) {
- if ((mb_row >= (mb_rows >> 3)) && (mb_row <= (7 * mb_rows >> 3)) &&
- (mb_col >= (mb_cols >> 3)) && (mb_col <= (7 * mb_cols >> 3)))
+void VideoDenoiser::DenoiserReset(const VideoFrame& frame,
+ VideoFrame* denoised_frame,
+ VideoFrame* denoised_frame_prev) {
+ width_ = frame.width();
+ height_ = frame.height();
+ mb_cols_ = width_ >> 4;
+ mb_rows_ = height_ >> 4;
+ stride_y_ = frame.video_frame_buffer()->StrideY();
+ stride_u_ = frame.video_frame_buffer()->StrideU();
+ stride_v_ = frame.video_frame_buffer()->StrideV();
+
+ // Allocate an empty buffer for denoised_frame_prev.
+ denoised_frame_prev->CreateEmptyFrame(width_, height_, stride_y_, stride_u_,
+ stride_v_);
+ // Allocate and initialize denoised_frame with key frame.
+ denoised_frame->CreateFrame(
+ frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->DataV(),
+ width_, height_, stride_y_, stride_u_, stride_v_, kVideoRotation_0);
+ // Set time parameters to the output frame.
+ denoised_frame->set_timestamp(frame.timestamp());
+ denoised_frame->set_render_time_ms(frame.render_time_ms());
+
+ // Init noise estimator and allocate buffers.
+ ne_->Init(width_, height_, cpu_type_);
+ moving_edge_.reset(new uint8_t[mb_cols_ * mb_rows_]);
+ mb_filter_decision_.reset(new DenoiserDecision[mb_cols_ * mb_rows_]);
+ x_density_.reset(new uint8_t[mb_cols_]);
+ y_density_.reset(new uint8_t[mb_rows_]);
+ moving_object_.reset(new uint8_t[mb_cols_ * mb_rows_]);
+}
+
+int VideoDenoiser::PositionCheck(int mb_row, int mb_col, int noise_level) {
+ if (noise_level == 0)
return 1;
- else if ((mb_row >= (mb_rows >> 4)) && (mb_row <= (15 * mb_rows >> 4)) &&
- (mb_col >= (mb_cols >> 4)) && (mb_col <= (15 * mb_cols >> 4)))
+ if ((mb_row <= (mb_rows_ >> 4)) || (mb_col <= (mb_cols_ >> 4)) ||
+ (mb_col >= (15 * mb_cols_ >> 4)))
+ return 3;
+ else if ((mb_row <= (mb_rows_ >> 3)) || (mb_col <= (mb_cols_ >> 3)) ||
+ (mb_col >= (7 * mb_cols_ >> 3)))
return 2;
else
- return 3;
+ return 1;
}
-static void ReduceFalseDetection(const std::unique_ptr<uint8_t[]>& d_status,
- std::unique_ptr<uint8_t[]>* d_status_tmp1,
- std::unique_ptr<uint8_t[]>* d_status_tmp2,
- int noise_level,
- int mb_rows,
- int mb_cols) {
- // Draft. This can be optimized. This code block is to reduce false detection
- // in moving object detection.
- int mb_row_min = noise_level ? mb_rows >> 3 : 1;
- int mb_col_min = noise_level ? mb_cols >> 3 : 1;
- int mb_row_max = noise_level ? (7 * mb_rows >> 3) : mb_rows - 2;
- int mb_col_max = noise_level ? (7 * mb_cols >> 3) : mb_cols - 2;
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Up left.
- for (int mb_row = mb_row_min; mb_row <= mb_row_max; ++mb_row) {
- for (int mb_col = mb_col_min; mb_col <= mb_col_max; ++mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row - 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col - 1]);
+void VideoDenoiser::ReduceFalseDetection(
+ const std::unique_ptr<uint8_t[]>& d_status,
+ std::unique_ptr<uint8_t[]>* moving_edge_red,
+ int noise_level) {
+ // From up left corner.
+ int mb_col_stop = mb_cols_ - 1;
+ for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) {
+ for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col - 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp2).get(), (*d_status_tmp1).get(), mb_rows * mb_cols);
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Bottom left.
- for (int mb_row = mb_row_max; mb_row >= mb_row_min; --mb_row) {
- for (int mb_col = mb_col_min; mb_col <= mb_col_max; ++mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row + 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col - 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From bottom left corner.
+ mb_col_stop = mb_cols_ - 1;
+ for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) {
+ for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col - 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Up right.
- for (int mb_row = mb_row_min; mb_row <= mb_row_max; ++mb_row) {
- for (int mb_col = mb_col_max; mb_col >= mb_col_min; --mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row - 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col + 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From up right corner.
+ mb_col_stop = 0;
+ for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) {
+ for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col + 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Bottom right.
- for (int mb_row = mb_row_max; mb_row >= mb_row_min; --mb_row) {
- for (int mb_col = mb_col_max; mb_col >= mb_col_min; --mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row + 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col + 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From bottom right corner.
+ mb_col_stop = 0;
+ for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) {
+ for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col + 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
}
-static bool TrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
- int mb_row,
- int mb_col,
- int mb_rows,
- int mb_cols) {
- int mb_index = mb_row * mb_cols + mb_col;
- if (!mb_row || !mb_col || mb_row == mb_rows - 1 || mb_col == mb_cols - 1)
- return false;
- return d_status[mb_index + 1] || d_status[mb_index - 1] ||
- d_status[mb_index + mb_cols] || d_status[mb_index - mb_cols];
+bool VideoDenoiser::IsTrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
+ int mb_row,
+ int mb_col) {
+ bool ret = false;
+ int mb_index = mb_row * mb_cols_ + mb_col;
+ if (!mb_row || !mb_col || mb_row == mb_rows_ - 1 || mb_col == mb_cols_ - 1)
+ ret = false;
+ else
+ ret = d_status[mb_index + 1] || d_status[mb_index - 1] ||
+ d_status[mb_index + mb_cols_] || d_status[mb_index - mb_cols_];
+ return ret;
}
-#endif
-#if DISPLAY
-void ShowRect(const std::unique_ptr<DenoiserFilter>& filter,
- const std::unique_ptr<uint8_t[]>& d_status,
- const std::unique_ptr<uint8_t[]>& d_status_tmp2,
- const std::unique_ptr<uint8_t[]>& x_density,
- const std::unique_ptr<uint8_t[]>& y_density,
- const uint8_t* u_src,
- const uint8_t* v_src,
- uint8_t* u_dst,
- uint8_t* v_dst,
- int mb_rows,
- int mb_cols,
- int stride_u,
- int stride_v) {
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- int mb_index = mb_row * mb_cols + mb_col;
- const uint8_t* mb_src_u =
- u_src + (mb_row << 3) * stride_u + (mb_col << 3);
- const uint8_t* mb_src_v =
- v_src + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u + (mb_col << 3);
- uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t y_tmp_255[8 * 8];
- memset(y_tmp_255, 200, 8 * 8);
- // x_density_[mb_col] * y_density_[mb_row]
- if (d_status[mb_index] == 1) {
- // Paint to red.
- filter->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter->CopyMem8x8(y_tmp_255, 8, mb_dst_v, stride_v);
-#if EXPERIMENTAL
- } else if (d_status_tmp2[mb_row * mb_cols + mb_col] &&
- x_density[mb_col] * y_density[mb_row]) {
-#else
- } else if (x_density[mb_col] * y_density[mb_row]) {
-#endif
- // Paint to blue.
- filter->CopyMem8x8(y_tmp_255, 8, mb_dst_u, stride_u);
- filter->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
- } else {
- filter->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
+void VideoDenoiser::CopySrcOnMOB(const uint8_t* y_src, uint8_t* y_dst) {
+ // Loop over to copy src block if the block is marked as moving object block
+ // or if the block may cause trailing artifacts.
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ const int mb_index_base = mb_row * mb_cols_;
+ const int offset_base = (mb_row << 4) * stride_y_;
+ const uint8_t* mb_src_base = y_src + offset_base;
+ uint8_t* mb_dst_base = y_dst + offset_base;
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ const int mb_index = mb_index_base + mb_col;
+ const uint32_t offset_col = mb_col << 4;
+ const uint8_t* mb_src = mb_src_base + offset_col;
+ uint8_t* mb_dst = mb_dst_base + offset_col;
+ // Check if the block is a moving object block or may cause a trailing
+ // artifacts.
+ if (mb_filter_decision_[mb_index] != FILTER_BLOCK ||
+ IsTrailingBlock(moving_edge_, mb_row, mb_col) ||
+ (x_density_[mb_col] * y_density_[mb_row] &&
+ moving_object_[mb_row * mb_cols_ + mb_col])) {
+ // Copy y source.
+ filter_->CopyMem16x16(mb_src, stride_y_, mb_dst, stride_y_);
+ }
+ }
+ }
+}
+
+void VideoDenoiser::CopyLumaOnMargin(const uint8_t* y_src, uint8_t* y_dst) {
+ if ((mb_rows_ << 4) != height_) {
+ const uint8_t* margin_y_src = y_src + (mb_rows_ << 4) * stride_y_;
+ uint8_t* margin_y_dst = y_dst + (mb_rows_ << 4) * stride_y_;
+ memcpy(margin_y_dst, margin_y_src, (height_ - (mb_rows_ << 4)) * stride_y_);
+ }
+ if ((mb_cols_ << 4) != width_) {
+ const uint8_t* margin_y_src = y_src + (mb_cols_ << 4);
+ uint8_t* margin_y_dst = y_dst + (mb_cols_ << 4);
+ for (int i = 0; i < height_; ++i) {
+ for (int j = mb_cols_ << 4; j < width_; ++j) {
+ margin_y_dst[i * stride_y_ + j] = margin_y_src[i * stride_y_ + j];
}
}
}
}
-#endif
void VideoDenoiser::DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev,
- int noise_level_prev) {
- int stride_y = frame.stride(kYPlane);
- int stride_u = frame.stride(kUPlane);
- int stride_v = frame.stride(kVPlane);
- // If previous width and height are different from current frame's, then no
- // denoising for the current frame.
+ bool noise_estimation_enabled) {
+ // If previous width and height are different from current frame's, need to
+ // reallocate the buffers and no denoising for the current frame.
if (width_ != frame.width() || height_ != frame.height()) {
- width_ = frame.width();
- height_ = frame.height();
- denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane),
- frame.buffer(kVPlane), width_, height_,
- stride_y, stride_u, stride_v, kVideoRotation_0);
- denoised_frame_prev->CreateFrame(
- frame.buffer(kYPlane), frame.buffer(kUPlane), frame.buffer(kVPlane),
- width_, height_, stride_y, stride_u, stride_v, kVideoRotation_0);
- // Setting time parameters to the output frame.
- denoised_frame->set_timestamp(frame.timestamp());
- denoised_frame->set_render_time_ms(frame.render_time_ms());
- ne_->Init(width_, height_, cpu_type_);
+ DenoiserReset(frame, denoised_frame, denoised_frame_prev);
return;
}
- // For 16x16 block.
- int mb_cols = width_ >> 4;
- int mb_rows = height_ >> 4;
- if (metrics_.get() == nullptr)
- metrics_.reset(new DenoiseMetrics[mb_cols * mb_rows]());
- if (d_status_.get() == nullptr) {
- d_status_.reset(new uint8_t[mb_cols * mb_rows]());
-#if EXPERIMENTAL
- d_status_tmp1_.reset(new uint8_t[mb_cols * mb_rows]());
- d_status_tmp2_.reset(new uint8_t[mb_cols * mb_rows]());
-#endif
- x_density_.reset(new uint8_t[mb_cols]());
- y_density_.reset(new uint8_t[mb_rows]());
- }
- // Denoise on Y plane.
- uint8_t* y_dst = denoised_frame->buffer(kYPlane);
- uint8_t* u_dst = denoised_frame->buffer(kUPlane);
- uint8_t* v_dst = denoised_frame->buffer(kVPlane);
- uint8_t* y_dst_prev = denoised_frame_prev->buffer(kYPlane);
- const uint8_t* y_src = frame.buffer(kYPlane);
- const uint8_t* u_src = frame.buffer(kUPlane);
- const uint8_t* v_src = frame.buffer(kVPlane);
- uint8_t noise_level = noise_level_prev == -1 ? 0 : ne_->GetNoiseLevel();
- // Temporary buffer to store denoising result.
- uint8_t y_tmp[16 * 16] = {0};
- memset(x_density_.get(), 0, mb_cols);
- memset(y_density_.get(), 0, mb_rows);
+ // Set buffer pointers.
+ const uint8_t* y_src = frame.video_frame_buffer()->DataY();
+ const uint8_t* u_src = frame.video_frame_buffer()->DataU();
+ const uint8_t* v_src = frame.video_frame_buffer()->DataV();
+ uint8_t* y_dst = denoised_frame->video_frame_buffer()->MutableDataY();
+ uint8_t* u_dst = denoised_frame->video_frame_buffer()->MutableDataU();
+ uint8_t* v_dst = denoised_frame->video_frame_buffer()->MutableDataV();
+ uint8_t* y_dst_prev =
+ denoised_frame_prev->video_frame_buffer()->MutableDataY();
+ memset(x_density_.get(), 0, mb_cols_);
+ memset(y_density_.get(), 0, mb_rows_);
+ memset(moving_object_.get(), 1, mb_cols_ * mb_rows_);
+ uint8_t noise_level = noise_estimation_enabled ? ne_->GetNoiseLevel() : 0;
+ int thr_var_base = 16 * 16 * 2;
// Loop over blocks to accumulate/extract noise level and update x/y_density
// factors for moving object detection.
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
- uint8_t* mb_dst_prev =
- y_dst_prev + (mb_row << 4) * stride_y + (mb_col << 4);
- int mb_index = mb_row * mb_cols + mb_col;
-#if EXPERIMENTAL
- int pos_factor = PositionCheck(mb_row, mb_col, mb_rows, mb_cols);
- uint32_t thr_var_adp = 16 * 16 * 5 * (noise_level ? pos_factor : 1);
-#else
- uint32_t thr_var_adp = 16 * 16 * 5;
-#endif
- int brightness = 0;
- for (int i = 0; i < 16; ++i) {
- for (int j = 0; j < 16; ++j) {
- brightness += mb_src[i * stride_y + j];
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ const int mb_index_base = mb_row * mb_cols_;
+ const int offset_base = (mb_row << 4) * stride_y_;
+ const uint8_t* mb_src_base = y_src + offset_base;
+ uint8_t* mb_dst_base = y_dst + offset_base;
+ uint8_t* mb_dst_prev_base = y_dst_prev + offset_base;
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ const int mb_index = mb_index_base + mb_col;
+ const bool ne_enable = (mb_index % NOISE_SUBSAMPLE_INTERVAL == 0);
+ const int pos_factor = PositionCheck(mb_row, mb_col, noise_level);
+ const uint32_t thr_var_adp = thr_var_base * pos_factor;
+ const uint32_t offset_col = mb_col << 4;
+ const uint8_t* mb_src = mb_src_base + offset_col;
+ uint8_t* mb_dst = mb_dst_base + offset_col;
+ uint8_t* mb_dst_prev = mb_dst_prev_base + offset_col;
+
+ // TODO(jackychen): Need SSE2/NEON opt.
+ int luma = 0;
+ if (ne_enable) {
+ for (int i = 4; i < 12; ++i) {
+ for (int j = 4; j < 12; ++j) {
+ luma += mb_src[i * stride_y_ + j];
+ }
}
}
- // Get the denoised block.
- filter_->MbDenoise(mb_dst_prev, stride_y, y_tmp, 16, mb_src, stride_y, 0,
- 1, true);
- // The variance is based on the denoised blocks in time T and T-1.
- metrics_[mb_index].var = filter_->Variance16x8(
- mb_dst_prev, stride_y, y_tmp, 16, &metrics_[mb_index].sad);
+ // Get the filtered block and filter_decision.
+ mb_filter_decision_[mb_index] =
+ filter_->MbDenoise(mb_dst_prev, stride_y_, mb_dst, stride_y_, mb_src,
+ stride_y_, 0, noise_level);
- if (metrics_[mb_index].var > thr_var_adp) {
- ne_->ResetConsecLowVar(mb_index);
- d_status_[mb_index] = 1;
-#if EXPERIMENTAL
- if (noise_level == 0 || pos_factor < 3) {
- x_density_[mb_col] += 1;
- y_density_[mb_row] += 1;
+ // If filter decision is FILTER_BLOCK, no need to check moving edge.
+ // It is unlikely for a moving edge block to be filtered in current
+ // setting.
+ if (mb_filter_decision_[mb_index] == FILTER_BLOCK) {
+ uint32_t sse_t = 0;
+ if (ne_enable) {
+ // The variance used in noise estimation is based on the src block in
+ // time t (mb_src) and filtered block in time t-1 (mb_dist_prev).
+ uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y_,
+ mb_src, stride_y_, &sse_t);
+ ne_->GetNoise(mb_index, noise_var, luma);
}
-#else
- x_density_[mb_col] += 1;
- y_density_[mb_row] += 1;
-#endif
+ moving_edge_[mb_index] = 0; // Not a moving edge block.
} else {
uint32_t sse_t = 0;
- // The variance is based on the src blocks in time T and denoised block
- // in time T-1.
- uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y,
- mb_src, stride_y, &sse_t);
- ne_->GetNoise(mb_index, noise_var, brightness);
- d_status_[mb_index] = 0;
- }
- // Track denoised frame.
- filter_->CopyMem16x16(y_tmp, 16, mb_dst_prev, stride_y);
- }
- }
-
-#if EXPERIMENTAL
- ReduceFalseDetection(d_status_, &d_status_tmp1_, &d_status_tmp2_, noise_level,
- mb_rows, mb_cols);
-#endif
-
- // Denoise each MB based on the results of moving objects detection.
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
- uint8_t* mb_dst = y_dst + (mb_row << 4) * stride_y + (mb_col << 4);
- const uint8_t* mb_src_u =
- u_src + (mb_row << 3) * stride_u + (mb_col << 3);
- const uint8_t* mb_src_v =
- v_src + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u + (mb_col << 3);
- uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v + (mb_col << 3);
-#if EXPERIMENTAL
- if ((!d_status_tmp2_[mb_row * mb_cols + mb_col] ||
- x_density_[mb_col] * y_density_[mb_row] == 0) &&
- !TrailingBlock(d_status_, mb_row, mb_col, mb_rows, mb_cols)) {
-#else
- if (x_density_[mb_col] * y_density_[mb_row] == 0) {
-#endif
- if (filter_->MbDenoise(mb_dst, stride_y, y_tmp, 16, mb_src, stride_y, 0,
- noise_level, false) == FILTER_BLOCK) {
- filter_->CopyMem16x16(y_tmp, 16, mb_dst, stride_y);
+ // The variance used in MOD is based on the filtered blocks in time
+ // T (mb_dst) and T-1 (mb_dst_prev).
+ uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y_,
+ mb_dst, stride_y_, &sse_t);
+ if (noise_var > thr_var_adp) { // Moving edge checking.
+ if (ne_enable) {
+ ne_->ResetConsecLowVar(mb_index);
+ }
+ moving_edge_[mb_index] = 1; // Mark as moving edge block.
+ x_density_[mb_col] += (pos_factor < 3);
+ y_density_[mb_row] += (pos_factor < 3);
} else {
- // Copy y source.
- filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ moving_edge_[mb_index] = 0;
+ if (ne_enable) {
+ // The variance used in noise estimation is based on the src block
+ // in time t (mb_src) and filtered block in time t-1 (mb_dist_prev).
+ uint32_t noise_var = filter_->Variance16x8(
+ mb_dst_prev, stride_y_, mb_src, stride_y_, &sse_t);
+ ne_->GetNoise(mb_index, noise_var, luma);
+ }
}
- } else {
- // Copy y source.
- filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
}
- filter_->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter_->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
- }
- }
+ } // End of for loop
+ } // End of for loop
-#if DISPLAY // Rectangle diagnostics
- // Show rectangular region
- ShowRect(filter_, d_status_, d_status_tmp2_, x_density_, y_density_, u_src,
- v_src, u_dst, v_dst, mb_rows, mb_cols, stride_u, stride_v);
-#endif
+ ReduceFalseDetection(moving_edge_, &moving_object_, noise_level);
+
+ CopySrcOnMOB(y_src, y_dst);
+
+ // When frame width/height not divisible by 16, copy the margin to
+ // denoised_frame.
+ if ((mb_rows_ << 4) != height_ || (mb_cols_ << 4) != width_)
+ CopyLumaOnMargin(y_src, y_dst);
- // Setting time parameters to the output frame.
+ // TODO(jackychen): Need SSE2/NEON opt.
+ // Copy u/v planes.
+ memcpy(u_dst, u_src, (height_ >> 1) * stride_u_);
+ memcpy(v_dst, v_src, (height_ >> 1) * stride_v_);
+
+ // Set time parameters to the output frame.
denoised_frame->set_timestamp(frame.timestamp());
denoised_frame->set_render_time_ms(frame.render_time_ms());
- return;
+
+#if DISPLAY || DISPLAYNEON
+ // Show rectangular region
+ ShowRect(filter_, moving_edge_, moving_object_, x_density_, y_density_, u_src,
+ v_src, u_dst, v_dst, mb_rows_, mb_cols_, stride_u_, stride_v_);
+#endif
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
index 03b30d91c7f..114f663c03d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
+++ b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
@@ -22,25 +22,58 @@ namespace webrtc {
class VideoDenoiser {
public:
explicit VideoDenoiser(bool runtime_cpu_detection);
+
void DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
- VideoFrame* denoised_frame_track,
- int noise_level_prev);
+ VideoFrame* denoised_frame_prev,
+ bool noise_estimation_enabled);
private:
+ void DenoiserReset(const VideoFrame& frame,
+ VideoFrame* denoised_frame,
+ VideoFrame* denoised_frame_prev);
+
+ // Check the mb position, return 1: close to the frame center (between 1/8
+ // and 7/8 of width/height), 3: close to the border (out of 1/16 and 15/16
+ // of width/height), 2: in between.
+ int PositionCheck(int mb_row, int mb_col, int noise_level);
+
+ // To reduce false detection in moving object detection (MOD).
+ void ReduceFalseDetection(const std::unique_ptr<uint8_t[]>& d_status,
+ std::unique_ptr<uint8_t[]>* d_status_red,
+ int noise_level);
+
+ // Return whether a block might cause trailing artifact by checking if one of
+ // its neighbor blocks is a moving edge block.
+ bool IsTrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
+ int mb_row,
+ int mb_col);
+
+ // Copy input blocks to dst buffer on moving object blocks (MOB).
+ void CopySrcOnMOB(const uint8_t* y_src, uint8_t* y_dst);
+
+ // Copy luma margin blocks when frame width/height not divisible by 16.
+ void CopyLumaOnMargin(const uint8_t* y_src, uint8_t* y_dst);
+
int width_;
int height_;
+ int mb_rows_;
+ int mb_cols_;
+ int stride_y_;
+ int stride_u_;
+ int stride_v_;
CpuType cpu_type_;
- std::unique_ptr<DenoiseMetrics[]> metrics_;
std::unique_ptr<DenoiserFilter> filter_;
std::unique_ptr<NoiseEstimation> ne_;
- std::unique_ptr<uint8_t[]> d_status_;
-#if EXPERIMENTAL
- std::unique_ptr<uint8_t[]> d_status_tmp1_;
- std::unique_ptr<uint8_t[]> d_status_tmp2_;
-#endif
+ // 1 for moving edge block, 0 for static block.
+ std::unique_ptr<uint8_t[]> moving_edge_;
+ // 1 for moving object block, 0 for static block.
+ std::unique_ptr<uint8_t[]> moving_object_;
+ // x_density_ and y_density_ are used in MOD process.
std::unique_ptr<uint8_t[]> x_density_;
std::unique_ptr<uint8_t[]> y_density_;
+ // Save the return values by MbDenoise for each block.
+ std::unique_ptr<DenoiserDecision[]> mb_filter_decision_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi b/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
index 5bf0ea36c36..3e90fd21571 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
@@ -20,12 +20,6 @@
'sources': [
'include/video_processing.h',
'include/video_processing_defines.h',
- 'brightness_detection.cc',
- 'brightness_detection.h',
- 'content_analysis.cc',
- 'content_analysis.h',
- 'deflickering.cc',
- 'deflickering.h',
'frame_preprocessor.cc',
'frame_preprocessor.h',
'spatial_resampler.cc',
@@ -62,7 +56,6 @@
'target_name': 'video_processing_sse2',
'type': 'static_library',
'sources': [
- 'content_analysis_sse2.cc',
'util/denoiser_filter_sse2.cc',
'util/denoiser_filter_sse2.h',
],
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
index f34886f10f2..86f75bf239d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
@@ -18,21 +18,6 @@
namespace webrtc {
-namespace {
-
-int GetSubSamplingFactor(int width, int height) {
- if (width * height >= 640 * 480) {
- return 3;
- } else if (width * height >= 352 * 288) {
- return 2;
- } else if (width * height >= 176 * 144) {
- return 1;
- } else {
- return 0;
- }
-}
-} // namespace
-
VideoProcessing* VideoProcessing::Create() {
return new VideoProcessingImpl();
}
@@ -40,83 +25,6 @@ VideoProcessing* VideoProcessing::Create() {
VideoProcessingImpl::VideoProcessingImpl() {}
VideoProcessingImpl::~VideoProcessingImpl() {}
-void VideoProcessing::GetFrameStats(const VideoFrame& frame,
- FrameStats* stats) {
- ClearFrameStats(stats); // The histogram needs to be zeroed out.
- if (frame.IsZeroSize()) {
- return;
- }
-
- int width = frame.width();
- int height = frame.height();
- stats->sub_sampling_factor = GetSubSamplingFactor(width, height);
-
- const uint8_t* buffer = frame.buffer(kYPlane);
- // Compute histogram and sum of frame
- for (int i = 0; i < height; i += (1 << stats->sub_sampling_factor)) {
- int k = i * width;
- for (int j = 0; j < width; j += (1 << stats->sub_sampling_factor)) {
- stats->hist[buffer[k + j]]++;
- stats->sum += buffer[k + j];
- }
- }
-
- stats->num_pixels = (width * height) / ((1 << stats->sub_sampling_factor) *
- (1 << stats->sub_sampling_factor));
- assert(stats->num_pixels > 0);
-
- // Compute mean value of frame
- stats->mean = stats->sum / stats->num_pixels;
-}
-
-bool VideoProcessing::ValidFrameStats(const FrameStats& stats) {
- if (stats.num_pixels == 0) {
- LOG(LS_WARNING) << "Invalid frame stats.";
- return false;
- }
- return true;
-}
-
-void VideoProcessing::ClearFrameStats(FrameStats* stats) {
- stats->mean = 0;
- stats->sum = 0;
- stats->num_pixels = 0;
- stats->sub_sampling_factor = 0;
- memset(stats->hist, 0, sizeof(stats->hist));
-}
-
-void VideoProcessing::Brighten(int delta, VideoFrame* frame) {
- RTC_DCHECK(!frame->IsZeroSize());
- RTC_DCHECK(frame->width() > 0);
- RTC_DCHECK(frame->height() > 0);
-
- int num_pixels = frame->width() * frame->height();
-
- int look_up[256];
- for (int i = 0; i < 256; i++) {
- int val = i + delta;
- look_up[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
- }
-
- uint8_t* temp_ptr = frame->buffer(kYPlane);
- for (int i = 0; i < num_pixels; i++) {
- *temp_ptr = static_cast<uint8_t>(look_up[*temp_ptr]);
- temp_ptr++;
- }
-}
-
-int32_t VideoProcessingImpl::Deflickering(VideoFrame* frame,
- FrameStats* stats) {
- rtc::CritScope mutex(&mutex_);
- return deflickering_.ProcessFrame(frame, stats);
-}
-
-int32_t VideoProcessingImpl::BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) {
- rtc::CritScope mutex(&mutex_);
- return brightness_detection_.ProcessFrame(frame, stats);
-}
-
void VideoProcessingImpl::EnableTemporalDecimation(bool enable) {
rtc::CritScope mutex(&mutex_);
frame_pre_processor_.EnableTemporalDecimation(enable);
@@ -135,11 +43,6 @@ int32_t VideoProcessingImpl::SetTargetResolution(uint32_t width,
return frame_pre_processor_.SetTargetResolution(width, height, frame_rate);
}
-void VideoProcessingImpl::SetTargetFramerate(int frame_rate) {
- rtc::CritScope cs(&mutex_);
- frame_pre_processor_.SetTargetFramerate(frame_rate);
-}
-
uint32_t VideoProcessingImpl::GetDecimatedFrameRate() {
rtc::CritScope cs(&mutex_);
return frame_pre_processor_.GetDecimatedFrameRate();
@@ -155,9 +58,9 @@ uint32_t VideoProcessingImpl::GetDecimatedHeight() const {
return frame_pre_processor_.GetDecimatedHeight();
}
-void VideoProcessingImpl::EnableDenosing(bool enable) {
+void VideoProcessingImpl::EnableDenoising(bool enable) {
rtc::CritScope cs(&mutex_);
- frame_pre_processor_.EnableDenosing(enable);
+ frame_pre_processor_.EnableDenoising(enable);
}
const VideoFrame* VideoProcessingImpl::PreprocessFrame(
@@ -166,14 +69,4 @@ const VideoFrame* VideoProcessingImpl::PreprocessFrame(
return frame_pre_processor_.PreprocessFrame(frame);
}
-VideoContentMetrics* VideoProcessingImpl::GetContentMetrics() const {
- rtc::CritScope mutex(&mutex_);
- return frame_pre_processor_.GetContentMetrics();
-}
-
-void VideoProcessingImpl::EnableContentAnalysis(bool enable) {
- rtc::CritScope mutex(&mutex_);
- frame_pre_processor_.EnableContentAnalysis(enable);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
index 1d9a3775cf1..21e23c904dd 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
@@ -13,8 +13,6 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/brightness_detection.h"
-#include "webrtc/modules/video_processing/deflickering.h"
#include "webrtc/modules/video_processing/frame_preprocessor.h"
namespace webrtc {
@@ -26,28 +24,20 @@ class VideoProcessingImpl : public VideoProcessing {
~VideoProcessingImpl() override;
// Implements VideoProcessing.
- int32_t Deflickering(VideoFrame* frame, FrameStats* stats) override;
- int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) override;
void EnableTemporalDecimation(bool enable) override;
void SetInputFrameResampleMode(VideoFrameResampling resampling_mode) override;
- void EnableContentAnalysis(bool enable) override;
int32_t SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate) override;
- void SetTargetFramerate(int frame_rate) override;
uint32_t GetDecimatedFrameRate() override;
uint32_t GetDecimatedWidth() const override;
uint32_t GetDecimatedHeight() const override;
- void EnableDenosing(bool enable) override;
+ void EnableDenoising(bool enable) override;
const VideoFrame* PreprocessFrame(const VideoFrame& frame) override;
- VideoContentMetrics* GetContentMetrics() const override;
private:
rtc::CriticalSection mutex_;
- VPMDeflickering deflickering_ GUARDED_BY(mutex_);
- VPMBrightnessDetection brightness_detection_;
- VPMFramePreprocessor frame_pre_processor_;
+ VPMFramePreprocessor frame_pre_processor_ GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/BUILD.gn b/chromium/third_party/webrtc/modules/video_render/BUILD.gn
deleted file mode 100644
index 0771bd7080c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/BUILD.gn
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../../build/webrtc.gni")
-
-source_set("video_render_module") {
- sources = [
- "external/video_render_external_impl.cc",
- "external/video_render_external_impl.h",
- "i_video_render.h",
- "video_render.h",
- "video_render_defines.h",
- "video_render_impl.h",
- ]
-
- deps = [
- "../..:webrtc_common",
- "../../common_video",
- "../../system_wrappers",
- "../utility",
- ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
-}
-
-source_set("video_render") {
- sources = [
- "video_render_impl.cc",
- ]
- deps = [
- ":video_render_module",
- "../../system_wrappers",
- ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
-}
-
-if (!build_with_chromium) {
- config("video_render_internal_impl_config") {
- if (is_ios) {
- libs = [
- "OpenGLES.framework",
- "QuartzCore.framework",
- ]
- }
- }
-
- source_set("video_render_internal_impl") {
- libs = []
- sources = [
- "video_render_internal_impl.cc",
- ]
- deps = [
- ":video_render_module",
- "../../system_wrappers",
- ]
-
- if (is_linux) {
- sources += [
- "linux/video_render_linux_impl.cc",
- "linux/video_render_linux_impl.h",
- "linux/video_x11_channel.cc",
- "linux/video_x11_channel.h",
- "linux/video_x11_render.cc",
- "linux/video_x11_render.h",
- ]
-
- deps += [ "../..:webrtc_common" ]
-
- libs += [ "Xext" ]
- }
- if (is_mac) {
- sources += [
- "mac/cocoa_full_screen_window.h",
- "mac/cocoa_full_screen_window.mm",
- "mac/cocoa_render_view.h",
- "mac/cocoa_render_view.mm",
- "mac/video_render_agl.cc",
- "mac/video_render_agl.h",
- "mac/video_render_mac_carbon_impl.cc",
- "mac/video_render_mac_carbon_impl.h",
- "mac/video_render_mac_cocoa_impl.h",
- "mac/video_render_mac_cocoa_impl.mm",
- "mac/video_render_nsopengl.h",
- "mac/video_render_nsopengl.mm",
- ]
-
- libs += [
- "CoreVideo.framework",
- "QTKit.framework",
- ]
- }
- if (is_win) {
- sources += [
- "windows/i_video_render_win.h",
- "windows/video_render_direct3d9.cc",
- "windows/video_render_direct3d9.h",
- "windows/video_render_windows_impl.cc",
- "windows/video_render_windows_impl.h",
- ]
-
- directxsdk_exists =
- exec_script("//build/dir_exists.py",
- [ rebase_path("//third_party/directxsdk/files",
- root_build_dir) ],
- "trim string") == "True"
- if (directxsdk_exists) {
- directxsdk_path = "//third_party/directxsdk/files"
- } else {
- directxsdk_path =
- exec_script("../../build/find_directx_sdk.py", [], "trim string")
- }
- include_dirs = [ directxsdk_path + "/Include" ]
- }
- if (is_android) {
- sources += [
- "android/video_render_android_impl.cc",
- "android/video_render_android_impl.h",
- "android/video_render_android_native_opengl2.cc",
- "android/video_render_android_native_opengl2.h",
- "android/video_render_android_surface_view.cc",
- "android/video_render_android_surface_view.h",
- "android/video_render_opengles20.cc",
- "android/video_render_opengles20.h",
- ]
-
- libs += [ "GLESv2" ]
- }
- if (is_ios) {
- sources += [
- "ios/open_gles20.h",
- "ios/open_gles20.mm",
- "ios/video_render_ios_channel.h",
- "ios/video_render_ios_channel.mm",
- "ios/video_render_ios_gles20.h",
- "ios/video_render_ios_gles20.mm",
- "ios/video_render_ios_impl.h",
- "ios/video_render_ios_impl.mm",
- "ios/video_render_ios_view.h",
- "ios/video_render_ios_view.mm",
- ]
-
- deps += [ "../..:webrtc_common" ]
-
- cflags = [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
- }
-
- all_dependent_configs = [ ":video_render_internal_impl_config" ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
- }
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/DEPS b/chromium/third_party/webrtc/modules/video_render/DEPS
deleted file mode 100644
index 58ae9fe714f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/DEPS
+++ /dev/null
@@ -1,5 +0,0 @@
-include_rules = [
- "+webrtc/base",
- "+webrtc/common_video",
- "+webrtc/system_wrappers",
-]
diff --git a/chromium/third_party/webrtc/modules/video_render/OWNERS b/chromium/third_party/webrtc/modules/video_render/OWNERS
deleted file mode 100644
index 3aaa5328f5c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/OWNERS
+++ /dev/null
@@ -1,12 +0,0 @@
-mflodman@webrtc.org
-perkj@webrtc.org
-tkchin@webrtc.org
-
-per-file *.isolate=kjellander@webrtc.org
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
-
-per-file BUILD.gn=kjellander@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc
deleted file mode 100644
index 9affb23d99f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-
-#include "webrtc/modules/video_render/video_render_internal.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-JavaVM* VideoRenderAndroid::g_jvm = NULL;
-
-int32_t SetRenderAndroidVM(JavaVM* javaVM) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
- VideoRenderAndroid::g_jvm = javaVM;
- return 0;
-}
-
-VideoRenderAndroid::VideoRenderAndroid(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool /*fullscreen*/):
- _id(id),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderType(videoRenderType),
- _ptrWindow((jobject)(window)),
- _javaShutDownFlag(false),
- _javaShutdownEvent(*EventWrapper::Create()),
- _javaRenderEvent(*EventWrapper::Create()),
- _lastJavaRenderEvent(0),
- _javaRenderJniEnv(NULL) {
-}
-
-VideoRenderAndroid::~VideoRenderAndroid() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "VideoRenderAndroid dtor");
-
- if (_javaRenderThread)
- StopRender();
-
- for (AndroidStreamMap::iterator it = _streamsMap.begin();
- it != _streamsMap.end();
- ++it) {
- delete it->second;
- }
- delete &_javaShutdownEvent;
- delete &_javaRenderEvent;
- delete &_critSect;
-}
-
-int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
- return -1;
-}
-
-VideoRenderCallback*
-VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right,
- const float bottom) {
- CriticalSectionScoped cs(&_critSect);
-
- AndroidStream* renderStream = NULL;
- AndroidStreamMap::iterator item = _streamsMap.find(streamId);
- if (item != _streamsMap.end() && item->second != NULL) {
- WEBRTC_TRACE(kTraceInfo,
- kTraceVideoRenderer,
- -1,
- "%s: Render stream already exists",
- __FUNCTION__);
- return renderStream;
- }
-
- renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
- right, bottom, *this);
- if (renderStream) {
- _streamsMap[streamId] = renderStream;
- }
- else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return NULL;
- }
- return renderStream;
-}
-
-int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
- const uint32_t streamId) {
- CriticalSectionScoped cs(&_critSect);
-
- AndroidStreamMap::iterator item = _streamsMap.find(streamId);
- if (item == _streamsMap.end()) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return -1;
- }
- delete item->second;
- _streamsMap.erase(item);
- return 0;
-}
-
-int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- return -1;
-}
-
-int32_t VideoRenderAndroid::StartRender() {
- CriticalSectionScoped cs(&_critSect);
-
- if (_javaRenderThread) {
- // StartRender is called when this stream should start render.
- // However StopRender is not called when the streams stop rendering.
- // Thus the the thread is only deleted when the renderer is removed.
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s, Render thread already exist", __FUNCTION__);
- return 0;
- }
-
- _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
- "AndroidRenderThread"));
-
- _javaRenderThread->Start();
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
- __FUNCTION__);
- _javaRenderThread->SetPriority(rtc::kRealtimePriority);
- return 0;
-}
-
-int32_t VideoRenderAndroid::StopRender() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
- {
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderThread)
- {
- return -1;
- }
- _javaShutDownFlag = true;
- _javaRenderEvent.Set();
- }
-
- _javaShutdownEvent.Wait(3000);
- CriticalSectionScoped cs(&_critSect);
- _javaRenderThread->Stop();
- _javaRenderThread.reset();
-
- return 0;
-}
-
-void VideoRenderAndroid::ReDraw() {
- CriticalSectionScoped cs(&_critSect);
- // Allow redraw if it was more than 20ms since last.
- if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
- _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
- _javaRenderEvent.Set();
- }
-}
-
-bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
- return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
-}
-
-bool VideoRenderAndroid::JavaRenderThreadProcess()
-{
- _javaRenderEvent.Wait(1000);
-
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderJniEnv) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !_javaRenderJniEnv) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, _javaRenderJniEnv);
- return false;
- }
- }
-
- for (AndroidStreamMap::iterator it = _streamsMap.begin();
- it != _streamsMap.end();
- ++it) {
- it->second->DeliverFrame(_javaRenderJniEnv);
- }
-
- if (_javaShutDownFlag) {
- if (g_jvm->DetachCurrentThread() < 0)
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- else {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s: Java thread detached", __FUNCTION__);
- }
- _javaRenderJniEnv = NULL;
- _javaShutDownFlag = false;
- _javaShutdownEvent.Set();
- return false; // Do not run this thread again.
- }
- return true;
-}
-
-VideoRenderType VideoRenderAndroid::RenderType() {
- return _renderType;
-}
-
-RawVideoType VideoRenderAndroid::PerferedVideoType() {
- return kVideoI420;
-}
-
-bool VideoRenderAndroid::FullScreen() {
- return false;
-}
-
-int32_t VideoRenderAndroid::GetGraphicsMemory(
- uint64_t& /*totalGraphicsMemory*/,
- uint64_t& /*availableGraphicsMemory*/) const {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::GetScreenResolution(
- uint32_t& /*screenWidth*/,
- uint32_t& /*screenHeight*/) const {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-uint32_t VideoRenderAndroid::RenderFrameRate(
- const uint32_t /*streamId*/) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetStreamCropping(
- const uint32_t /*streamId*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right,
- const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h
deleted file mode 100644
index 06fd7a1c7cd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-
-#include <jni.h>
-
-#include <map>
-#include <memory>
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-
-
-namespace webrtc {
-
-//#define ANDROID_LOG
-
-class CriticalSectionWrapper;
-class EventWrapper;
-
-// The object a module user uses to send new frames to the java renderer
-// Base class for android render streams.
-
-class AndroidStream : public VideoRenderCallback {
- public:
- // DeliverFrame is called from a thread connected to the Java VM.
- // Used for Delivering frame for rendering.
- virtual void DeliverFrame(JNIEnv* jniEnv)=0;
-
- virtual ~AndroidStream() {};
-};
-
-class VideoRenderAndroid: IVideoRender {
- public:
- VideoRenderAndroid(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderAndroid();
-
- virtual int32_t Init()=0;
-
- virtual int32_t ChangeWindow(void* window);
-
- virtual VideoRenderCallback* AddIncomingRenderStream(
- const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(
- const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- virtual void ReDraw();
-
- // Properties
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey, const float left,
- const float top, const float right,
- const float bottom);
- static JavaVM* g_jvm;
-
- protected:
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) = 0;
-
- int32_t _id;
- CriticalSectionWrapper& _critSect;
- VideoRenderType _renderType;
- jobject _ptrWindow;
-
- private:
- static bool JavaRenderThreadFun(void* obj);
- bool JavaRenderThreadProcess();
-
- // Map with streams to render.
- typedef std::map<int32_t, AndroidStream*> AndroidStreamMap;
- AndroidStreamMap _streamsMap;
- // True if the _javaRenderThread thread shall be detached from the JVM.
- bool _javaShutDownFlag;
- EventWrapper& _javaShutdownEvent;
- EventWrapper& _javaRenderEvent;
- int64_t _lastJavaRenderEvent;
- JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
- // TODO(pbos): Remove unique_ptr and use the member directly.
- std::unique_ptr<rtc::PlatformThread> _javaRenderThread;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
deleted file mode 100644
index 286776e317b..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
+++ /dev/null
@@ -1,450 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- VideoRenderAndroid(id, videoRenderType, window, fullscreen),
- _javaRenderObj(NULL),
- _javaRenderClass(NULL) {
-}
-
-bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "RendererAndroid():UseOpenGL No JVM set.");
- return false;
- }
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- -1,
- "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
- res, env);
- return false;
- }
- isAttached = true;
- }
-
- // get the renderer class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not find ViEAndroidRenderer class",
- __FUNCTION__);
- return false;
- }
-
- // get the method ID for UseOpenGL
- jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
- "UseOpenGL2",
- "(Ljava/lang/Object;)Z");
- if (cidUseOpenGL == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not get UseOpenGL ID", __FUNCTION__);
- return false;
- }
- jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
- cidUseOpenGL, (jobject) window);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
- return res;
-}
-
-AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidNativeOpenGl2Renderer dtor");
- if (g_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- env = NULL;
- }
- else {
- isAttached = true;
- }
- }
-
- env->DeleteGlobalRef(_javaRenderObj);
- env->DeleteGlobalRef(_javaRenderClass);
-
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidNativeOpenGl2Renderer::Init() {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s): Not a valid Java VM pointer.", __FUNCTION__);
- return -1;
- }
- if (!_ptrWindow) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "(%s): No window have been provided.", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- // get the ViEAndroidGLES20 class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not find ViEAndroidGLES20", __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class (to tell JNI that
- // we are referencing it after this function has returned)
- _javaRenderClass =
- reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
- if (!_javaRenderClass) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not create Java SurfaceHolder class reference",
- __FUNCTION__);
- return -1;
- }
-
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaRenderClassLocal);
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaRenderObj = env->NewGlobalRef(_ptrWindow);
- if (!_javaRenderObj) {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java SurfaceRender object reference",
- __FUNCTION__);
- return -1;
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
- __FUNCTION__);
- return 0;
-
-}
-AndroidStream*
-AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
- __FUNCTION__, streamId);
- AndroidNativeOpenGl2Channel* stream =
- new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
- _javaRenderObj);
- if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
- return stream;
- else {
- delete stream;
- }
- return NULL;
-}
-
-AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,jobject javaRenderObj):
- _id(streamId),
- _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
- _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
- _openGLRenderer(streamId) {
-
-}
-AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidNativeOpenGl2Channel dtor");
- if (_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- env = NULL;
- } else {
- isAttached = true;
- }
- }
- if (env && _deRegisterNativeCID) {
- env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
- }
-
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-
- delete &_renderCritSect;
-}
-
-int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
- if (!_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Not a valid Java VM pointer", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- jclass javaRenderClass =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClass) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not find ViESurfaceRenderer", __FUNCTION__);
- return -1;
- }
-
- // get the method ID for the ReDraw function
- _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
- if (_redrawCid == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get ReDraw ID", __FUNCTION__);
- return -1;
- }
-
- _registerNativeCID = env->GetMethodID(javaRenderClass,
- "RegisterNativeObject", "(J)V");
- if (_registerNativeCID == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get RegisterNativeObject ID", __FUNCTION__);
- return -1;
- }
-
- _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
- "DeRegisterNativeObject", "()V");
- if (_deRegisterNativeCID == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get DeRegisterNativeObject ID",
- __FUNCTION__);
- return -1;
- }
-
- JNINativeMethod nativeFunctions[2] = {
- { "DrawNative",
- "(J)V",
- (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
- { "CreateOpenGLNative",
- "(JII)I",
- (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
- };
- if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
- "%s: Registered native functions", __FUNCTION__);
- }
- else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: Failed to register native functions", __FUNCTION__);
- return -1;
- }
-
- env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
- return -1;
- }
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
- return 0;
-}
-
-int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
- _renderCritSect.Enter();
- _bufferToRender = videoFrame;
- _renderCritSect.Leave();
- _renderer.ReDraw();
- return 0;
-}
-
-/*Implements AndroidStream
- * Calls the Java object and render the buffer in _bufferToRender
- */
-void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
- //TickTime timeNow=TickTime::Now();
-
- //Draw the Surface
- jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
-
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
- // "%s: time to deliver %lld" ,__FUNCTION__,
- // (TickTime::Now()-timeNow).Milliseconds());
-}
-
-/*
- * JNI callback from Java class. Called when the render
- * want to render a frame. Called from the GLRenderThread
- * Method: DrawNative
- * Signature: (J)V
- */
-void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
- JNIEnv * env, jobject, jlong context) {
- AndroidNativeOpenGl2Channel* renderChannel =
- reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
- renderChannel->DrawNative();
-}
-
-void AndroidNativeOpenGl2Channel::DrawNative() {
- _renderCritSect.Enter();
- _openGLRenderer.Render(_bufferToRender);
- _renderCritSect.Leave();
-}
-
-/*
- * JNI callback from Java class. Called when the GLSurfaceview
- * have created a surface. Called from the GLRenderThread
- * Method: CreateOpenGLNativeStatic
- * Signature: (JII)I
- */
-jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
- JNIEnv * env,
- jobject,
- jlong context,
- jint width,
- jint height) {
- AndroidNativeOpenGl2Channel* renderChannel =
- reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
- return renderChannel->CreateOpenGLNative(width, height);
-}
-
-jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
- int width, int height) {
- return _openGLRenderer.Setup(width, height);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
deleted file mode 100644
index 8be247b8342..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/android/video_render_opengles20.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class AndroidNativeOpenGl2Channel: public AndroidStream {
- public:
- AndroidNativeOpenGl2Channel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,jobject javaRenderObj);
- ~AndroidNativeOpenGl2Channel();
-
- int32_t Init(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- //Implement VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
- private:
- static jint JNICALL CreateOpenGLNativeStatic(
- JNIEnv * env,
- jobject,
- jlong context,
- jint width,
- jint height);
- jint CreateOpenGLNative(int width, int height);
-
- static void JNICALL DrawNativeStatic(JNIEnv * env,jobject, jlong context);
- void DrawNative();
- uint32_t _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
- jmethodID _redrawCid;
- jmethodID _registerNativeCID;
- jmethodID _deRegisterNativeCID;
- VideoRenderOpenGles20 _openGLRenderer;
-};
-
-
-class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
- public:
- AndroidNativeOpenGl2Renderer(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- ~AndroidNativeOpenGl2Renderer();
- static bool UseOpenGL2(void* window);
-
- int32_t Init();
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
-
- private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc
deleted file mode 100644
index ea3b106b1ed..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc
+++ /dev/null
@@ -1,474 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- VideoRenderAndroid(id,videoRenderType,window,fullscreen),
- _javaRenderObj(NULL),
- _javaRenderClass(NULL) {
-}
-
-AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidSurfaceViewRenderer dtor");
- if(g_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- env=NULL;
- }
- else {
- isAttached = true;
- }
- }
- env->DeleteGlobalRef(_javaRenderObj);
- env->DeleteGlobalRef(_javaRenderClass);
-
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidSurfaceViewRenderer::Init() {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "(%s): Not a valid Java VM pointer.",
- __FUNCTION__);
- return -1;
- }
- if(!_ptrWindow) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "(%s): No window have been provided.",
- __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- return -1;
- }
- isAttached = true;
- }
-
- // get the ViESurfaceRender class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not find ViESurfaceRenderer",
- __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class (to tell JNI that
- // we are referencing it after this function has returned)
- _javaRenderClass =
- reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
- if (!_javaRenderClass) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java ViESurfaceRenderer class reference",
- __FUNCTION__);
- return -1;
- }
-
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaRenderClassLocal);
-
- // get the method ID for the constructor
- jmethodID cid = env->GetMethodID(_javaRenderClass,
- "<init>",
- "(Landroid/view/SurfaceView;)V");
- if (cid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get constructor ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // construct the object
- jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
- cid,
- _ptrWindow);
- if (!javaRenderObjLocal) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java Render",
- __FUNCTION__);
- return -1;
- }
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
- if (!_javaRenderObj) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java SurfaceRender object reference",
- __FUNCTION__);
- return -1;
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
- return 0;
-}
-
-AndroidStream*
-AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) {
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: Id %d",
- __FUNCTION__,
- streamId);
- AndroidSurfaceViewChannel* stream =
- new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
- if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
- return stream;
- else
- delete stream;
- return NULL;
-}
-
-AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,
- jobject javaRenderObj) :
- _id(streamId),
- _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderer(renderer),
- _jvm(jvm),
- _javaRenderObj(javaRenderObj),
-#ifndef ANDROID_NDK_8_OR_ABOVE
- _javaByteBufferObj(NULL),
- _directBuffer(NULL),
-#endif
- _bitmapWidth(0),
- _bitmapHeight(0) {
-}
-
-AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
- WEBRTC_TRACE(kTraceInfo,
- kTraceVideoRenderer,
- _id,
- "AndroidSurfaceViewChannel dtor");
- delete &_renderCritSect;
- if(_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- env=NULL;
- }
- else {
- isAttached = true;
- }
- }
-
- env->DeleteGlobalRef(_javaByteBufferObj);
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidSurfaceViewChannel::Init(
- int32_t /*zOrder*/,
- const float left,
- const float top,
- const float right,
- const float bottom) {
-
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: AndroidSurfaceViewChannel",
- __FUNCTION__);
- if (!_jvm) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Not a valid Java VM pointer",
- __FUNCTION__);
- return -1;
- }
-
- if( (top > 1 || top < 0) ||
- (right > 1 || right < 0) ||
- (bottom > 1 || bottom < 0) ||
- (left > 1 || left < 0)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Wrong coordinates", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- return -1;
- }
- isAttached = true;
- }
-
- jclass javaRenderClass =
- env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClass) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not find ViESurfaceRenderer",
- __FUNCTION__);
- return -1;
- }
-
- // get the method ID for the CreateIntArray
- _createByteBufferCid =
- env->GetMethodID(javaRenderClass,
- "CreateByteBuffer",
- "(II)Ljava/nio/ByteBuffer;");
- if (_createByteBufferCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get CreateByteBuffer ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // get the method ID for the DrawByteBuffer function
- _drawByteBufferCid = env->GetMethodID(javaRenderClass,
- "DrawByteBuffer",
- "()V");
- if (_drawByteBufferCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get DrawByteBuffer ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // get the method ID for the SetCoordinates function
- _setCoordinatesCid = env->GetMethodID(javaRenderClass,
- "SetCoordinates",
- "(FFFF)V");
- if (_setCoordinatesCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get SetCoordinates ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
- left, top, right, bottom);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: AndroidSurfaceViewChannel done",
- __FUNCTION__);
- return 0;
-}
-
-int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
- _renderCritSect.Enter();
- _bufferToRender = videoFrame;
- _renderCritSect.Leave();
- _renderer.ReDraw();
- return 0;
-}
-
-
-/*Implements AndroidStream
- * Calls the Java object and render the buffer in _bufferToRender
- */
-void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
- _renderCritSect.Enter();
-
- if (_bitmapWidth != _bufferToRender.width() ||
- _bitmapHeight != _bufferToRender.height()) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
- "%d",__FUNCTION__,
- _bufferToRender.width(), _bufferToRender.height());
- if (_javaByteBufferObj) {
- jniEnv->DeleteGlobalRef(_javaByteBufferObj);
- _javaByteBufferObj = NULL;
- _directBuffer = NULL;
- }
-
- jobject javaByteBufferObj =
- jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
- _bufferToRender.width(),
- _bufferToRender.height());
- _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
- if (!_javaByteBufferObj) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
- "create Java ByteBuffer object reference", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- } else {
- _directBuffer = static_cast<unsigned char*>
- (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
- _bitmapWidth = _bufferToRender.width();
- _bitmapHeight = _bufferToRender.height();
- }
- }
-
- if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
- const int conversionResult =
- ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);
-
- if (conversionResult < 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
- " failed.", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- }
- }
- _renderCritSect.Leave();
- // Draw the Surface
- jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h
deleted file mode 100644
index 0f029b54f34..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class AndroidSurfaceViewChannel : public AndroidStream {
- public:
- AndroidSurfaceViewChannel(uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,
- jobject javaRenderObj);
- ~AndroidSurfaceViewChannel();
-
- int32_t Init(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- //Implement VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
- private:
- uint32_t _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
- jobject _javaByteBufferObj;
- unsigned char* _directBuffer;
- jmethodID _createByteBufferCid;
- jmethodID _drawByteBufferCid;
-
- jmethodID _setCoordinatesCid;
- int _bitmapWidth;
- int _bitmapHeight;
-};
-
-class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
- public:
- AndroidSurfaceViewRenderer(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
- ~AndroidSurfaceViewRenderer();
- int32_t Init();
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
- private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc
deleted file mode 100644
index 45db56a4f6e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "webrtc/modules/video_render/android/video_render_opengles20.h"
-
-//#define ANDROID_LOG
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
-
-const char VideoRenderOpenGles20::g_vertextShader[] = {
- "attribute vec4 aPosition;\n"
- "attribute vec2 aTextureCoord;\n"
- "varying vec2 vTextureCoord;\n"
- "void main() {\n"
- " gl_Position = aPosition;\n"
- " vTextureCoord = aTextureCoord;\n"
- "}\n" };
-
-// The fragment shader.
-// Do YUV to RGB565 conversion.
-const char VideoRenderOpenGles20::g_fragmentShader[] = {
- "precision mediump float;\n"
- "uniform sampler2D Ytex;\n"
- "uniform sampler2D Utex,Vtex;\n"
- "varying vec2 vTextureCoord;\n"
- "void main(void) {\n"
- " float nx,ny,r,g,b,y,u,v;\n"
- " mediump vec4 txl,ux,vx;"
- " nx=vTextureCoord[0];\n"
- " ny=vTextureCoord[1];\n"
- " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
- " u=texture2D(Utex,vec2(nx,ny)).r;\n"
- " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
-
- //" y = v;\n"+
- " y=1.1643*(y-0.0625);\n"
- " u=u-0.5;\n"
- " v=v-0.5;\n"
-
- " r=y+1.5958*v;\n"
- " g=y-0.39173*u-0.81290*v;\n"
- " b=y+2.017*u;\n"
- " gl_FragColor=vec4(r,g,b,1.0);\n"
- "}\n" };
-
-VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
- _id(id),
- _textureWidth(-1),
- _textureHeight(-1) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
- __FUNCTION__, (int) _id);
-
- const GLfloat vertices[20] = {
- // X, Y, Z, U, V
- -1, -1, 0, 0, 1, // Bottom Left
- 1, -1, 0, 1, 1, //Bottom Right
- 1, 1, 0, 1, 0, //Top Right
- -1, 1, 0, 0, 0 }; //Top Left
-
- memcpy(_vertices, vertices, sizeof(_vertices));
-}
-
-VideoRenderOpenGles20::~VideoRenderOpenGles20() {
-}
-
-int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: width %d, height %d", __FUNCTION__, (int) width,
- (int) height);
-
- printGLString("Version", GL_VERSION);
- printGLString("Vendor", GL_VENDOR);
- printGLString("Renderer", GL_RENDERER);
- printGLString("Extensions", GL_EXTENSIONS);
-
- int maxTextureImageUnits[2];
- int maxTextureSize[2];
- glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: number of textures %d, size %d", __FUNCTION__,
- (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
-
- _program = createProgram(g_vertextShader, g_fragmentShader);
- if (!_program) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not create program", __FUNCTION__);
- return -1;
- }
-
- int positionHandle = glGetAttribLocation(_program, "aPosition");
- checkGlError("glGetAttribLocation aPosition");
- if (positionHandle == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not get aPosition handle", __FUNCTION__);
- return -1;
- }
-
- int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
- checkGlError("glGetAttribLocation aTextureCoord");
- if (textureHandle == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not get aTextureCoord handle", __FUNCTION__);
- return -1;
- }
-
- // set the vertices array in the shader
- // _vertices contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
- 5 * sizeof(GLfloat), _vertices);
- checkGlError("glVertexAttribPointer aPosition");
-
- glEnableVertexAttribArray(positionHandle);
- checkGlError("glEnableVertexAttribArray positionHandle");
-
- // set the texture coordinate array in the shader
- // _vertices contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
- * sizeof(GLfloat), &_vertices[3]);
- checkGlError("glVertexAttribPointer maTextureHandle");
- glEnableVertexAttribArray(textureHandle);
- checkGlError("glEnableVertexAttribArray textureHandle");
-
- glUseProgram(_program);
- int i = glGetUniformLocation(_program, "Ytex");
- checkGlError("glGetUniformLocation");
- glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
- checkGlError("glUniform1i Ytex");
-
- i = glGetUniformLocation(_program, "Utex");
- checkGlError("glGetUniformLocation Utex");
- glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
- checkGlError("glUniform1i Utex");
-
- i = glGetUniformLocation(_program, "Vtex");
- checkGlError("glGetUniformLocation");
- glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
- checkGlError("glUniform1i");
-
- glViewport(0, 0, width, height);
- checkGlError("glViewport");
- return 0;
-}
-
-// SetCoordinates
-// Sets the coordinates where the stream shall be rendered.
-// Values must be between 0 and 1.
-int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
- (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Wrong coordinates", __FUNCTION__);
- return -1;
- }
-
- // X, Y, Z, U, V
- // -1, -1, 0, 0, 1, // Bottom Left
- // 1, -1, 0, 1, 1, //Bottom Right
- // 1, 1, 0, 1, 0, //Top Right
- // -1, 1, 0, 0, 0 //Top Left
-
- // Bottom Left
- _vertices[0] = (left * 2) - 1;
- _vertices[1] = -1 * (2 * bottom) + 1;
- _vertices[2] = zOrder;
-
- //Bottom Right
- _vertices[5] = (right * 2) - 1;
- _vertices[6] = -1 * (2 * bottom) + 1;
- _vertices[7] = zOrder;
-
- //Top Right
- _vertices[10] = (right * 2) - 1;
- _vertices[11] = -1 * (2 * top) + 1;
- _vertices[12] = zOrder;
-
- //Top Left
- _vertices[15] = (left * 2) - 1;
- _vertices[16] = -1 * (2 * top) + 1;
- _vertices[17] = zOrder;
-
- return 0;
-}
-
-int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
- if (frameToRender.IsZeroSize()) {
- return -1;
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
- __FUNCTION__, (int) _id);
-
- glUseProgram(_program);
- checkGlError("glUseProgram");
-
- if (_textureWidth != (GLsizei) frameToRender.width() ||
- _textureHeight != (GLsizei) frameToRender.height()) {
- SetupTextures(frameToRender);
- }
- UpdateTextures(frameToRender);
-
- glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
- checkGlError("glDrawArrays");
-
- return 0;
-}
-
-GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
- const char* pSource) {
- GLuint shader = glCreateShader(shaderType);
- if (shader) {
- glShaderSource(shader, 1, &pSource, NULL);
- glCompileShader(shader);
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint infoLen = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
- if (infoLen) {
- char* buf = (char*) malloc(infoLen);
- if (buf) {
- glGetShaderInfoLog(shader, infoLen, NULL, buf);
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not compile shader %d: %s",
- __FUNCTION__, shaderType, buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- }
- }
- return shader;
-}
-
-GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
- const char* pFragmentSource) {
- GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
- if (!vertexShader) {
- return 0;
- }
-
- GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
- if (!pixelShader) {
- return 0;
- }
-
- GLuint program = glCreateProgram();
- if (program) {
- glAttachShader(program, vertexShader);
- checkGlError("glAttachShader");
- glAttachShader(program, pixelShader);
- checkGlError("glAttachShader");
- glLinkProgram(program);
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
- if (linkStatus != GL_TRUE) {
- GLint bufLength = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
- if (bufLength) {
- char* buf = (char*) malloc(bufLength);
- if (buf) {
- glGetProgramInfoLog(program, bufLength, NULL, buf);
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not link program: %s",
- __FUNCTION__, buf);
- free(buf);
- }
- }
- glDeleteProgram(program);
- program = 0;
- }
- }
- return program;
-}
-
-void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
- const char *v = (const char *) glGetString(s);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
- name, v);
-}
-
-void VideoRenderOpenGles20::checkGlError(const char* op) {
-#ifdef ANDROID_LOG
- for (GLint error = glGetError(); error; error = glGetError()) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "after %s() glError (0x%x)\n", op, error);
- }
-#else
- return;
-#endif
-}
-
-static void InitializeTexture(int name, int id, int width, int height) {
- glActiveTexture(name);
- glBindTexture(GL_TEXTURE_2D, id);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
-}
-
-void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: width %d, height %d", __FUNCTION__,
- frameToRender.width(), frameToRender.height());
-
- const GLsizei width = frameToRender.width();
- const GLsizei height = frameToRender.height();
-
- glGenTextures(3, _textureIds); //Generate the Y, U and V texture
- InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
- InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
- InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);
-
- checkGlError("SetupTextures");
-
- _textureWidth = width;
- _textureHeight = height;
-}
-
-// Uploads a plane of pixel data, accounting for stride != width*bpp.
-static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
- const uint8_t* plane) {
- if (stride == width) {
- // Yay! We can upload the entire plane in a single GL call.
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane));
- } else {
- // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
- // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
- for (int row = 0; row < height; ++row) {
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane + (row * stride)));
- }
- }
-}
-
-void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
- const GLsizei width = frameToRender.width();
- const GLsizei height = frameToRender.height();
-
- glActiveTexture(GL_TEXTURE0);
- glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
- GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
- frameToRender.buffer(kYPlane));
-
- glActiveTexture(GL_TEXTURE1);
- glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
- GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
- frameToRender.buffer(kUPlane));
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
- GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
- frameToRender.buffer(kVPlane));
-
- checkGlError("UpdateTextures");
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h
deleted file mode 100644
index 57e2a10d42e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-namespace webrtc
-{
-
-class VideoRenderOpenGles20 {
- public:
- VideoRenderOpenGles20(int32_t id);
- ~VideoRenderOpenGles20();
-
- int32_t Setup(int32_t widht, int32_t height);
- int32_t Render(const VideoFrame& frameToRender);
- int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- private:
- void printGLString(const char *name, GLenum s);
- void checkGlError(const char* op);
- GLuint loadShader(GLenum shaderType, const char* pSource);
- GLuint createProgram(const char* pVertexSource,
- const char* pFragmentSource);
- void SetupTextures(const VideoFrame& frameToRender);
- void UpdateTextures(const VideoFrame& frameToRender);
-
- int32_t _id;
- GLuint _textureIds[3]; // Texture id of Y,U and V texture.
- GLuint _program;
- GLsizei _textureWidth;
- GLsizei _textureHeight;
-
- GLfloat _vertices[20];
- static const char g_indices[];
-
- static const char g_vertextShader[];
- static const char g_fragmentShader[];
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc b/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc
deleted file mode 100644
index 58df07875ec..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-
-namespace webrtc {
-
-VideoRenderExternalImpl::VideoRenderExternalImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _fullscreen(fullscreen)
-{
-}
-
-VideoRenderExternalImpl::~VideoRenderExternalImpl()
-{
- delete &_critSect;
-}
-
-int32_t VideoRenderExternalImpl::Init()
-{
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::ChangeWindow(void* window)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderExternalImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return this;
-}
-
-int32_t VideoRenderExternalImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- CriticalSectionScoped cs(&_critSect);
-
- zOrder = 0;
- left = 0;
- top = 0;
- right = 0;
- bottom = 0;
-
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::StartRender()
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::StopRender()
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-VideoRenderType VideoRenderExternalImpl::RenderType()
-{
- return kRenderExternal;
-}
-
-RawVideoType VideoRenderExternalImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderExternalImpl::FullScreen()
-{
- CriticalSectionScoped cs(&_critSect);
- return _fullscreen;
-}
-
-int32_t VideoRenderExternalImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return -1;
-}
-
-int32_t VideoRenderExternalImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_critSect);
- screenWidth = 0;
- screenHeight = 0;
- return 0;
-}
-
-uint32_t VideoRenderExternalImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetTransparentBackground(
- const bool enable)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-// VideoRenderCallback
-int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- return 0;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h b/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h
deleted file mode 100644
index a8b663fff7e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderExternalImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderExternalImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey, const float left,
- const float top, const float right,
- const float bottom);
-
- // VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
-private:
- CriticalSectionWrapper& _critSect;
- bool _fullscreen;
-};
-
-} // namespace webrtc
-
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/i_video_render.h b/chromium/third_party/webrtc/modules/video_render/i_video_render.h
deleted file mode 100644
index e6ec7a4680b..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/i_video_render.h
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
-
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-
-// Class definitions
-class IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- virtual ~IVideoRender() {}
-
- virtual int32_t Init() = 0;
-
- virtual int32_t ChangeWindow(void* window) = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId) = 0;
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const = 0;
- // Implemented in common code?
- //virtual uint32_t GetNumIncomingRenderStreams() const = 0;
- //virtual bool HasIncomingRenderStream(const uint16_t stramId) const = 0;
-
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender() = 0;
-
- virtual int32_t StopRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
- virtual VideoRenderType RenderType() = 0;
-
- virtual RawVideoType PerferedVideoType() = 0;
-
- virtual bool FullScreen() = 0;
-
- // TODO: This should be treated in platform specific code only
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const = 0;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const = 0;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float rigth,
- const float bottom) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h
deleted file mode 100644
index 880ddb5231f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
-
-#include <OpenGLES/ES2/glext.h>
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-/*
- * This OpenGles20 is the class of renderer for VideoFrame into a GLES 2.0
- * windows used in the VideoRenderIosView class.
- */
-namespace webrtc {
-class OpenGles20 {
- public:
- OpenGles20();
- ~OpenGles20();
-
- bool Setup(int32_t width, int32_t height);
- bool Render(const VideoFrame& frame);
-
- // SetCoordinates
- // Sets the coordinates where the stream shall be rendered.
- // Values must be between 0 and 1.
- bool SetCoordinates(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- private:
- // Compile and load the vertex and fragment shaders defined at the top of
- // open_gles20.mm
- GLuint LoadShader(GLenum shader_type, const char* shader_source);
-
- GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
-
- // Initialize the textures by the frame width and height
- void SetupTextures(const VideoFrame& frame);
-
- // Update the textures by the YUV data from the frame
- void UpdateTextures(const VideoFrame& frame);
-
- GLuint texture_ids_[3]; // Texture id of Y,U and V texture.
- GLuint program_;
- GLsizei texture_width_;
- GLsizei texture_height_;
-
- GLfloat vertices_[20];
- static const char indices_[];
- static const char vertext_shader_[];
- static const char fragment_shader_[];
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
deleted file mode 100644
index d1735280f26..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-// This files is mostly copied from
-// webrtc/modules/video_render/android/video_render_opengles20.h
-
-// TODO(sjlee): unify this copy with the android one.
-#include "webrtc/modules/video_render/ios/open_gles20.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};
-
-const char OpenGles20::vertext_shader_[] = {
- "attribute vec4 aPosition;\n"
- "attribute vec2 aTextureCoord;\n"
- "varying vec2 vTextureCoord;\n"
- "void main() {\n"
- " gl_Position = aPosition;\n"
- " vTextureCoord = aTextureCoord;\n"
- "}\n"};
-
-// The fragment shader.
-// Do YUV to RGB565 conversion.
-const char OpenGles20::fragment_shader_[] = {
- "precision mediump float;\n"
- "uniform sampler2D Ytex;\n"
- "uniform sampler2D Utex,Vtex;\n"
- "varying vec2 vTextureCoord;\n"
- "void main(void) {\n"
- " float nx,ny,r,g,b,y,u,v;\n"
- " mediump vec4 txl,ux,vx;"
- " nx=vTextureCoord[0];\n"
- " ny=vTextureCoord[1];\n"
- " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
- " u=texture2D(Utex,vec2(nx,ny)).r;\n"
- " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
- " y=1.1643*(y-0.0625);\n"
- " u=u-0.5;\n"
- " v=v-0.5;\n"
- " r=y+1.5958*v;\n"
- " g=y-0.39173*u-0.81290*v;\n"
- " b=y+2.017*u;\n"
- " gl_FragColor=vec4(r,g,b,1.0);\n"
- "}\n"};
-
-OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) {
- texture_ids_[0] = 0;
- texture_ids_[1] = 0;
- texture_ids_[2] = 0;
-
- program_ = 0;
-
- const GLfloat vertices[20] = {
- // X, Y, Z, U, V
- -1, -1, 0, 0, 1, // Bottom Left
- 1, -1, 0, 1, 1, // Bottom Right
- 1, 1, 0, 1, 0, // Top Right
- -1, 1, 0, 0, 0}; // Top Left
-
- memcpy(vertices_, vertices, sizeof(vertices_));
-}
-
-OpenGles20::~OpenGles20() {
- if (program_) {
- glDeleteTextures(3, texture_ids_);
- glDeleteProgram(program_);
- }
-}
-
-bool OpenGles20::Setup(int32_t width, int32_t height) {
- program_ = CreateProgram(vertext_shader_, fragment_shader_);
- if (!program_) {
- return false;
- }
-
- int position_handle = glGetAttribLocation(program_, "aPosition");
- int texture_handle = glGetAttribLocation(program_, "aTextureCoord");
-
- // set the vertices array in the shader
- // vertices_ contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(
- position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_);
-
- glEnableVertexAttribArray(position_handle);
-
- // set the texture coordinate array in the shader
- // vertices_ contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(
- texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]);
- glEnableVertexAttribArray(texture_handle);
-
- glUseProgram(program_);
- int i = glGetUniformLocation(program_, "Ytex");
- glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
-
- i = glGetUniformLocation(program_, "Utex");
- glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
-
- i = glGetUniformLocation(program_, "Vtex");
- glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
-
- glViewport(0, 0, width, height);
- return true;
-}
-
-bool OpenGles20::SetCoordinates(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if (top > 1 || top < 0 || right > 1 || right < 0 || bottom > 1 ||
- bottom < 0 || left > 1 || left < 0) {
- return false;
- }
-
- // Bottom Left
- vertices_[0] = (left * 2) - 1;
- vertices_[1] = -1 * (2 * bottom) + 1;
- vertices_[2] = z_order;
-
- // Bottom Right
- vertices_[5] = (right * 2) - 1;
- vertices_[6] = -1 * (2 * bottom) + 1;
- vertices_[7] = z_order;
-
- // Top Right
- vertices_[10] = (right * 2) - 1;
- vertices_[11] = -1 * (2 * top) + 1;
- vertices_[12] = z_order;
-
- // Top Left
- vertices_[15] = (left * 2) - 1;
- vertices_[16] = -1 * (2 * top) + 1;
- vertices_[17] = z_order;
-
- return true;
-}
-
-bool OpenGles20::Render(const VideoFrame& frame) {
- if (texture_width_ != (GLsizei)frame.width() ||
- texture_height_ != (GLsizei)frame.height()) {
- SetupTextures(frame);
- }
- UpdateTextures(frame);
-
- glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_);
-
- return true;
-}
-
-GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) {
- GLuint shader = glCreateShader(shader_type);
- if (shader) {
- glShaderSource(shader, 1, &shader_source, NULL);
- glCompileShader(shader);
-
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint info_len = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
- if (info_len) {
- char* buf = (char*)malloc(info_len);
- glGetShaderInfoLog(shader, info_len, NULL, buf);
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- 0,
- "%s: Could not compile shader %d: %s",
- __FUNCTION__,
- shader_type,
- buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- }
- return shader;
-}
-
-GLuint OpenGles20::CreateProgram(const char* vertex_source,
- const char* fragment_source) {
- GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source);
- if (!vertex_shader) {
- return -1;
- }
-
- GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source);
- if (!fragment_shader) {
- return -1;
- }
-
- GLuint program = glCreateProgram();
- if (program) {
- glAttachShader(program, vertex_shader);
- glAttachShader(program, fragment_shader);
- glLinkProgram(program);
- GLint link_status = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &link_status);
- if (link_status != GL_TRUE) {
- GLint info_len = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len);
- if (info_len) {
- char* buf = (char*)malloc(info_len);
- glGetProgramInfoLog(program, info_len, NULL, buf);
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- 0,
- "%s: Could not link program: %s",
- __FUNCTION__,
- buf);
- free(buf);
- }
- glDeleteProgram(program);
- program = 0;
- }
- }
-
- if (vertex_shader) {
- glDeleteShader(vertex_shader);
- }
-
- if (fragment_shader) {
- glDeleteShader(fragment_shader);
- }
-
- return program;
-}
-
-static void InitializeTexture(int name, int id, int width, int height) {
- glActiveTexture(name);
- glBindTexture(GL_TEXTURE_2D, id);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- GL_LUMINANCE,
- width,
- height,
- 0,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- NULL);
-}
-
-void OpenGles20::SetupTextures(const VideoFrame& frame) {
- const GLsizei width = frame.width();
- const GLsizei height = frame.height();
-
- if (!texture_ids_[0]) {
- glGenTextures(3, texture_ids_); // Generate the Y, U and V texture
- }
-
- InitializeTexture(GL_TEXTURE0, texture_ids_[0], width, height);
- InitializeTexture(GL_TEXTURE1, texture_ids_[1], width / 2, height / 2);
- InitializeTexture(GL_TEXTURE2, texture_ids_[2], width / 2, height / 2);
-
- texture_width_ = width;
- texture_height_ = height;
-}
-
-// Uploads a plane of pixel data, accounting for stride != width*bpp.
-static void GlTexSubImage2D(GLsizei width,
- GLsizei height,
- int stride,
- const uint8_t* plane) {
- if (stride == width) {
- // Yay! We can upload the entire plane in a single GL call.
- glTexSubImage2D(GL_TEXTURE_2D,
- 0,
- 0,
- 0,
- width,
- height,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane));
- } else {
- // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
- // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
- for (int row = 0; row < height; ++row) {
- glTexSubImage2D(GL_TEXTURE_2D,
- 0,
- 0,
- row,
- width,
- 1,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane + (row * stride)));
- }
- }
-}
-
-void OpenGles20::UpdateTextures(const VideoFrame& frame) {
- const GLsizei width = frame.width();
- const GLsizei height = frame.height();
-
- glActiveTexture(GL_TEXTURE0);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
- GlTexSubImage2D(width, height, frame.stride(kYPlane), frame.buffer(kYPlane));
-
- glActiveTexture(GL_TEXTURE1);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
- GlTexSubImage2D(
- width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
- GlTexSubImage2D(
- width / 2, height / 2, frame.stride(kVPlane), frame.buffer(kVPlane));
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h
deleted file mode 100644
index a15ba393dc0..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-
-namespace webrtc {
-
-class VideoRenderIosGles20;
-
-class VideoRenderIosChannel : public VideoRenderCallback {
- public:
- explicit VideoRenderIosChannel(VideoRenderIosView* view);
- virtual ~VideoRenderIosChannel();
-
- // Implementation of VideoRenderCallback.
- int32_t RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame) override;
-
- int SetStreamSettings(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom);
- bool IsUpdated();
- bool RenderOffScreenBuffer();
-
- private:
- VideoRenderIosView* view_;
- VideoFrame* current_frame_;
- bool buffer_is_updated_;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
deleted file mode 100644
index b2b15857f93..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
-
-using namespace webrtc;
-
-VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
- : view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) {
-}
-
-VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
-
-int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame) {
- current_frame_->CopyFrame(video_frame);
- current_frame_->set_render_time_ms(0);
- buffer_is_updated_ = true;
-
- return 0;
-}
-
-bool VideoRenderIosChannel::RenderOffScreenBuffer() {
- if (![view_ renderFrame:current_frame_]) {
- return false;
- }
-
- buffer_is_updated_ = false;
-
- return true;
-}
-
-bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; }
-
-int VideoRenderIosChannel::SetStreamSettings(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if (![view_ setCoordinatesForZOrder:z_order
- Left:left
- Top:bottom
- Right:right
- Bottom:top]) {
-
- return -1;
- }
-
- return 0;
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h
deleted file mode 100644
index d4e04e79d73..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
-
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class EventTimerWrapper;
-
-class VideoRenderIosGles20 {
- public:
- VideoRenderIosGles20(VideoRenderIosView* view,
- bool full_screen,
- int render_id);
- virtual ~VideoRenderIosGles20();
-
- int Init();
- VideoRenderIosChannel* CreateEaglChannel(int channel,
- int z_order,
- float left,
- float top,
- float right,
- float bottom);
- int DeleteEaglChannel(int channel);
- bool HasChannel(int channel);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect); // NOLINT
-
- int GetScreenResolution(uint& screen_width, uint& screen_height); // NOLINT
- int SetStreamCropping(const uint stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- int ChangeWindow(void* new_window);
- int StartRender();
- int StopRender();
-
- protected:
- static bool ScreenUpdateThreadProc(void* obj);
-
- private:
- bool RenderOffScreenBuffers();
- int SwapAndDisplayBuffers();
-
- private:
- std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
- EventTimerWrapper* screen_update_event_;
- // TODO(pbos): Remove unique_ptr and use member directly.
- std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
-
- VideoRenderIosView* view_;
- Rect window_rect_;
- int window_width_;
- int window_height_;
- bool is_full_screen_;
- GLint backing_width_;
- GLint backing_height_;
- GLuint view_renderbuffer_;
- GLuint view_framebuffer_;
- GLuint depth_renderbuffer_;
- std::map<int, VideoRenderIosChannel*> agl_channels_;
- std::multimap<int, int> z_order_to_channel_;
- EAGLContext* gles_context_;
- bool is_rendering_;
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
deleted file mode 100644
index 6ad5db8b8cb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-
-using namespace webrtc;
-
-VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
- bool full_screen,
- int render_id)
- : gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
- screen_update_event_(0),
- view_(view),
- window_rect_(),
- window_width_(0),
- window_height_(0),
- is_full_screen_(full_screen),
- agl_channels_(),
- z_order_to_channel_(),
- gles_context_([view context]),
- is_rendering_(true) {
- screen_update_thread_.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
- screen_update_event_ = EventTimerWrapper::Create();
- GetWindowRect(window_rect_);
-}
-
-VideoRenderIosGles20::~VideoRenderIosGles20() {
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
-
- if (thread_wrapper) {
- screen_update_event_->Set();
- screen_update_event_->StopTimer();
-
- thread_wrapper->Stop();
- delete thread_wrapper;
- delete screen_update_event_;
- screen_update_event_ = NULL;
- is_rendering_ = FALSE;
- }
-
- // Delete all channels
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- delete it->second;
- agl_channels_.erase(it);
- it = agl_channels_.begin();
- }
- agl_channels_.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
- while (z_it != z_order_to_channel_.end()) {
- z_order_to_channel_.erase(z_it);
- z_it = z_order_to_channel_.begin();
- }
- z_order_to_channel_.clear();
-}
-
-int VideoRenderIosGles20::Init() {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!view_) {
- view_ = [[VideoRenderIosView alloc] init];
- }
-
- if (![view_ createContext]) {
- return -1;
- }
-
- screen_update_thread_->Start();
- screen_update_thread_->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitor_freq = 60;
- screen_update_event_->StartTimer(true, 1000 / monitor_freq);
-
- window_width_ = window_rect_.right - window_rect_.left;
- window_height_ = window_rect_.bottom - window_rect_.top;
-
- return 0;
-}
-
-VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
- int z_order,
- float left,
- float top,
- float right,
- float bottom) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (HasChannel(channel)) {
- return NULL;
- }
-
- VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
-
- if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
- -1) {
- return NULL;
- }
-
- agl_channels_[channel] = new_eagl_channel;
- z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
-
- return new_eagl_channel;
-}
-
-int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- std::map<int, VideoRenderIosChannel*>::iterator it;
- it = agl_channels_.find(channel);
- if (it != agl_channels_.end()) {
- delete it->second;
- agl_channels_.erase(it);
- } else {
- return -1;
- }
-
- std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
- while (z_it != z_order_to_channel_.end()) {
- if (z_it->second == channel) {
- z_order_to_channel_.erase(z_it);
- break;
- }
- z_it++;
- }
-
- return 0;
-}
-
-bool VideoRenderIosGles20::HasChannel(int channel) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- std::map<int, VideoRenderIosChannel*>::iterator it =
- agl_channels_.find(channel);
-
- if (it != agl_channels_.end()) {
- return true;
- }
-
- return false;
-}
-
-// Rendering process
-bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
- return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderIosGles20::ScreenUpdateProcess() {
- screen_update_event_->Wait(100);
-
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!is_rendering_) {
- return false;
- }
-
- if (!screen_update_thread_) {
- return false;
- }
-
- if (GetWindowRect(window_rect_) == -1) {
- return true;
- }
-
- if (window_width_ != (window_rect_.right - window_rect_.left) ||
- window_height_ != (window_rect_.bottom - window_rect_.top)) {
- window_width_ = window_rect_.right - window_rect_.left;
- window_height_ = window_rect_.bottom - window_rect_.top;
- }
-
- // Check if there are any updated buffers
- bool updated = false;
-
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- VideoRenderIosChannel* agl_channel = it->second;
-
- updated = agl_channel->IsUpdated();
- if (updated) {
- break;
- }
- it++;
- }
-
- if (updated) {
- // At least one buffer has been updated, we need to repaint the texture
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator r_it =
- z_order_to_channel_.rbegin();
- r_it != z_order_to_channel_.rend();
- r_it++) {
- int channel_id = r_it->second;
- std::map<int, VideoRenderIosChannel*>::iterator it =
- agl_channels_.find(channel_id);
-
- VideoRenderIosChannel* agl_channel = it->second;
-
- agl_channel->RenderOffScreenBuffer();
- }
-
- [view_ presentFramebuffer];
- }
-
- return true;
-}
-
-int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!view_) {
- return -1;
- }
-
- CGRect bounds = [view_ bounds];
- rect.top = bounds.origin.y;
- rect.left = bounds.origin.x;
- rect.bottom = bounds.size.height + bounds.origin.y;
- rect.right = bounds.size.width + bounds.origin.x;
-
- return 0;
-}
-
-int VideoRenderIosGles20::ChangeWindow(void* new_window) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- view_ = (__bridge VideoRenderIosView*)new_window;
-
- return 0;
-}
-
-int VideoRenderIosGles20::StartRender() {
- is_rendering_ = true;
- return 0;
-}
-
-int VideoRenderIosGles20::StopRender() {
- is_rendering_ = false;
- return 0;
-}
-
-int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
- uint& screen_height) {
- screen_width = [view_ bounds].size.width;
- screen_height = [view_ bounds].size.height;
- return 0;
-}
-
-int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- // Check if there are any updated buffers
- // bool updated = false;
- uint counter = 0;
-
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- if (counter == stream_id) {
- VideoRenderIosChannel* agl_channel = it->second;
- agl_channel->SetStreamSettings(0, left, top, right, bottom);
- }
- counter++;
- it++;
- }
-
- return 0;
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
deleted file mode 100644
index 04a74933008..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
-
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-
-class VideoRenderIosGles20;
-class CriticalSectionWrapper;
-
-class VideoRenderIosImpl : IVideoRender {
- public:
- explicit VideoRenderIosImpl(const int32_t id,
- void* window,
- const bool full_screen);
-
- ~VideoRenderIosImpl();
-
- // Implementation of IVideoRender.
- int32_t Init() override;
- int32_t ChangeWindow(void* window) override;
-
- VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
- const uint32_t z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
-
- int32_t DeleteIncomingRenderStream(const uint32_t stream_id) override;
-
- int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
- uint32_t& z_order,
- float& left,
- float& top,
- float& right,
- float& bottom) const override;
-
- int32_t StartRender() override;
- int32_t StopRender() override;
-
- VideoRenderType RenderType() override;
- RawVideoType PerferedVideoType() override;
- bool FullScreen() override;
- int32_t GetGraphicsMemory(
- uint64_t& total_graphics_memory,
- uint64_t& available_graphics_memory) const override; // NOLINT
- int32_t GetScreenResolution(
- uint32_t& screen_width,
- uint32_t& screen_height) const override; // NOLINT
- uint32_t RenderFrameRate(const uint32_t stream_id);
- int32_t SetStreamCropping(const uint32_t stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t ConfigureRenderer(const uint32_t stream_id,
- const unsigned int z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t SetTransparentBackground(const bool enable) override;
- int32_t SetText(const uint8_t text_id,
- const uint8_t* text,
- const int32_t text_length,
- const uint32_t text_color_ref,
- const uint32_t background_color_ref,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t SetBitmap(const void* bit_map,
- const uint8_t picture_id,
- const void* color_key,
- const float left,
- const float top,
- const float right,
- const float bottom);
- int32_t FullScreenRender(void* window, const bool enable);
-
- private:
- int32_t id_;
- void* ptr_window_;
- bool full_screen_;
-
- CriticalSectionWrapper* crit_sec_;
- std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
deleted file mode 100644
index 0ef411d56f8..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-#define IOS_UNSUPPORTED() \
- WEBRTC_TRACE(kTraceError, \
- kTraceVideoRenderer, \
- id_, \
- "%s is not supported on the iOS platform.", \
- __FUNCTION__); \
- return -1;
-
-VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
- void* window,
- const bool full_screen)
- : id_(id),
- ptr_window_(window),
- full_screen_(full_screen),
- crit_sec_(CriticalSectionWrapper::CreateCriticalSection()) {}
-
-VideoRenderIosImpl::~VideoRenderIosImpl() {
- delete crit_sec_;
-}
-
-int32_t VideoRenderIosImpl::Init() {
- CriticalSectionScoped cs(crit_sec_);
-
- ptr_ios_render_.reset(new VideoRenderIosGles20(
- (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
-
- return ptr_ios_render_->Init();
- ;
-}
-
-int32_t VideoRenderIosImpl::ChangeWindow(void* window) {
- CriticalSectionScoped cs(crit_sec_);
- if (window == NULL) {
- return -1;
- }
-
- ptr_window_ = window;
-
- return ptr_ios_render_->ChangeWindow(ptr_window_);
-}
-
-VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream(
- const uint32_t stream_id,
- const uint32_t z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- CriticalSectionScoped cs(crit_sec_);
- if (!ptr_window_) {
- return NULL;
- }
-
- return ptr_ios_render_->CreateEaglChannel(
- stream_id, z_order, left, top, right, bottom);
-}
-
-int32_t VideoRenderIosImpl::DeleteIncomingRenderStream(
- const uint32_t stream_id) {
- CriticalSectionScoped cs(crit_sec_);
-
- return ptr_ios_render_->DeleteEaglChannel(stream_id);
-}
-
-int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties(
- const uint32_t stream_id,
- uint32_t& z_order,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::StartRender() {
- return ptr_ios_render_->StartRender();
-}
-
-int32_t VideoRenderIosImpl::StopRender() {
- return ptr_ios_render_->StopRender();
-}
-
-VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; }
-
-RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; }
-
-bool VideoRenderIosImpl::FullScreen() { IOS_UNSUPPORTED(); }
-
-int32_t VideoRenderIosImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const {
- return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
- IOS_UNSUPPORTED();
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
deleted file mode 100644
index d110bc78bd2..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
-
-#import <UIKit/UIKit.h>
-#import <QuartzCore/QuartzCore.h>
-
-#include "webrtc/modules/video_render/ios/open_gles20.h"
-
-@interface VideoRenderIosView : UIView
-
-- (BOOL)createContext;
-- (BOOL)presentFramebuffer;
-- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
-- (BOOL)setCoordinatesForZOrder:(const float)zOrder
- Left:(const float)left
- Top:(const float)top
- Right:(const float)right
- Bottom:(const float)bottom;
-
-@property(nonatomic, retain) EAGLContext* context;
-
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
deleted file mode 100644
index b106ffa5c4f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include <memory>
-
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation VideoRenderIosView {
- EAGLContext* _context;
- std::unique_ptr<webrtc::OpenGles20> _gles_renderer20;
- int _frameBufferWidth;
- int _frameBufferHeight;
- unsigned int _defaultFrameBuffer;
- unsigned int _colorRenderBuffer;
-}
-
-@synthesize context = context_;
-
-+ (Class)layerClass {
- return [CAEAGLLayer class];
-}
-
-- (id)initWithCoder:(NSCoder*)coder {
- // init super class
- self = [super initWithCoder:coder];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (id)init {
- // init super class
- self = [super init];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (id)initWithFrame:(CGRect)frame {
- // init super class
- self = [super initWithFrame:frame];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (void)dealloc {
- if (_defaultFrameBuffer) {
- glDeleteFramebuffers(1, &_defaultFrameBuffer);
- _defaultFrameBuffer = 0;
- }
-
- if (_colorRenderBuffer) {
- glDeleteRenderbuffers(1, &_colorRenderBuffer);
- _colorRenderBuffer = 0;
- }
-
- [EAGLContext setCurrentContext:nil];
-}
-
-- (NSString*)description {
- return [NSString stringWithFormat:
- @"A WebRTC implemented subclass of UIView."
- "+Class method is overwritten, along with custom methods"];
-}
-
-- (BOOL)createContext {
- // create OpenGLES context from self layer class
- CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
- eagl_layer.opaque = YES;
- eagl_layer.drawableProperties =
- [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
- kEAGLDrawablePropertyRetainedBacking,
- kEAGLColorFormatRGBA8,
- kEAGLDrawablePropertyColorFormat,
- nil];
- _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
-
- if (!_context) {
- return NO;
- }
-
- if (![EAGLContext setCurrentContext:_context]) {
- return NO;
- }
-
- // generates and binds the OpenGLES buffers
- glGenFramebuffers(1, &_defaultFrameBuffer);
- glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
-
- // Create color render buffer and allocate backing store.
- glGenRenderbuffers(1, &_colorRenderBuffer);
- glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
- [_context renderbufferStorage:GL_RENDERBUFFER
- fromDrawable:(CAEAGLLayer*)self.layer];
- glGetRenderbufferParameteriv(
- GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
- glGetRenderbufferParameteriv(
- GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight);
- glFramebufferRenderbuffer(GL_FRAMEBUFFER,
- GL_COLOR_ATTACHMENT0,
- GL_RENDERBUFFER,
- _colorRenderBuffer);
-
- if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
- return NO;
- }
-
- // set the frame buffer
- glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
- glViewport(0, 0, self.frame.size.width, self.frame.size.height);
-
- return _gles_renderer20->Setup([self bounds].size.width,
- [self bounds].size.height);
-}
-
-- (BOOL)presentFramebuffer {
- if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- 0,
- "%s:%d [context present_renderbuffer] "
- "returned false",
- __FUNCTION__,
- __LINE__);
- }
- return YES;
-}
-
-- (BOOL)renderFrame:(VideoFrame*)frameToRender {
- if (![EAGLContext setCurrentContext:_context]) {
- return NO;
- }
-
- return _gles_renderer20->Render(*frameToRender);
-}
-
-- (BOOL)setCoordinatesForZOrder:(const float)zOrder
- Left:(const float)left
- Top:(const float)top
- Right:(const float)right
- Bottom:(const float)bottom {
- return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
-}
-
-@end
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc
deleted file mode 100644
index 7e53dfdf809..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
-
-#include "webrtc/modules/video_render/linux/video_x11_render.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-#include <X11/Xlib.h>
-
-namespace webrtc {
-
-VideoRenderLinuxImpl::VideoRenderLinuxImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen) :
- _id(id),
- _renderLinuxCritsect(
- *CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _ptrX11Render(NULL)
-{
-}
-
-VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
-{
- if (_ptrX11Render)
- delete _ptrX11Render;
-
- delete &_renderLinuxCritsect;
-}
-
-int32_t VideoRenderLinuxImpl::Init()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(&_renderLinuxCritsect);
- _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
- if (!_ptrX11Render)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s",
- "Failed to create instance of VideoX11Render object");
- return -1;
- }
- int retVal = _ptrX11Render->Init();
- if (retVal == -1)
- {
- return -1;
- }
-
- return 0;
-
-}
-
-int32_t VideoRenderLinuxImpl::ChangeWindow(void* window)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(&_renderLinuxCritsect);
- _ptrWindow = window;
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->ChangeWindow((Window) window);
- }
-
- return -1;
-}
-
-VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
- const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- VideoRenderCallback* renderCallback = NULL;
- if (_ptrX11Render)
- {
- VideoX11Channel* renderChannel =
- _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
- top, right, bottom);
- if (!renderChannel)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "Render channel creation failed for stream id: %d",
- streamId);
- return NULL;
- }
- renderCallback = (VideoRenderCallback *) renderChannel;
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "_ptrX11Render is NULL");
- return NULL;
- }
- return renderCallback;
-}
-
-int32_t VideoRenderLinuxImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->DeleteX11RenderChannel(streamId);
- }
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
- }
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::StartRender()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- return 0;
-}
-
-int32_t VideoRenderLinuxImpl::StopRender()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- return 0;
-}
-
-VideoRenderType VideoRenderLinuxImpl::RenderType()
-{
- return kRenderX11;
-}
-
-RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderLinuxImpl::FullScreen()
-{
- return false;
-}
-
-int32_t VideoRenderLinuxImpl::GetGraphicsMemory(
- uint64_t& /*totalGraphicsMemory*/,
- uint64_t& /*availableGraphicsMemory*/) const
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::GetScreenResolution(
- uint32_t& /*screenWidth*/,
- uint32_t& /*screenHeight*/) const
-{
- return -1;
-}
-
-uint32_t VideoRenderLinuxImpl::RenderFrameRate(const uint32_t /*streamId*/)
-{
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetStreamCropping(
- const uint32_t /*streamId*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h b/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h
deleted file mode 100644
index 0e9ae54c18f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class VideoX11Render;
-
-// Class definitions
-class VideoRenderLinuxImpl: IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderLinuxImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderLinuxImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderLinuxCritsect;
-
- void* _ptrWindow;
-
- // X11 Render
- VideoX11Render* _ptrX11Render;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc
deleted file mode 100644
index 8d86b7c72ad..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc
+++ /dev/null
@@ -1,315 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_x11_channel.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-#define DISP_MAX 128
-
-static Display *dispArray[DISP_MAX];
-static int dispCount = 0;
-
-
-VideoX11Channel::VideoX11Channel(int32_t id) :
- _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
- _shminfo(), _image(NULL), _window(0L), _gc(NULL),
- _width(DEFAULT_RENDER_FRAME_WIDTH),
- _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
- _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
- _top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
- _Id(id)
-{
-}
-
-VideoX11Channel::~VideoX11Channel()
-{
- if (_prepared)
- {
- _crit.Enter();
- ReleaseWindow();
- _crit.Leave();
- }
- delete &_crit;
-}
-
-int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_crit);
- if (_width != videoFrame.width() || _height
- != videoFrame.height()) {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- return -1;
- }
- }
- return DeliverFrame(videoFrame);
-}
-
-int32_t VideoX11Channel::FrameSizeChange(int32_t width,
- int32_t height,
- int32_t /*numberOfStreams */)
-{
- CriticalSectionScoped cs(&_crit);
- if (_prepared)
- {
- RemoveRenderer();
- }
- if (CreateLocalRenderer(width, height) == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_crit);
- if (!_prepared) {
- return 0;
- }
-
- if (!dispArray[_dispCount]) {
- return -1;
- }
-
- ConvertFromI420(videoFrame, kARGB, 0, _buffer);
-
- // Put image in window.
- XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
- _height, True);
-
- // Very important for the image to update properly!
- XSync(_display, False);
- return 0;
-}
-
-int32_t VideoX11Channel::GetFrameSize(int32_t& width, int32_t& height)
-{
- width = _width;
- height = _height;
-
- return 0;
-}
-
-int32_t VideoX11Channel::Init(Window window, float left, float top,
- float right, float bottom)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- _window = window;
- _left = left;
- _right = right;
- _top = top;
- _bottom = bottom;
-
- _display = XOpenDisplay(NULL); // Use default display
- if (!_window || !_display)
- {
- return -1;
- }
-
- if (dispCount < DISP_MAX)
- {
- dispArray[dispCount] = _display;
- _dispCount = dispCount;
- dispCount++;
- }
- else
- {
- return -1;
- }
-
- if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
- < 0) || (1 < bottom || bottom < 0))
- {
- return -1;
- }
-
- // calculate position and size of rendered video
- int x, y;
- unsigned int winWidth, winHeight, borderwidth, depth;
- Window rootret;
- if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
- &winHeight, &borderwidth, &depth) == 0)
- {
- return -1;
- }
-
- _xPos = (int32_t) (winWidth * left);
- _yPos = (int32_t) (winHeight * top);
- _outWidth = (int32_t) (winWidth * (right - left));
- _outHeight = (int32_t) (winHeight * (bottom - top));
- if (_outWidth % 2)
- _outWidth++; // the renderer want's sizes that are multiples of two
- if (_outHeight % 2)
- _outHeight++;
-
- _gc = XCreateGC(_display, _window, 0, 0);
- if (!_gc) {
- // Failed to create the graphics context.
- assert(false);
- return -1;
- }
-
- if (CreateLocalRenderer(winWidth, winHeight) == -1)
- {
- return -1;
- }
- return 0;
-
-}
-
-int32_t VideoX11Channel::ChangeWindow(Window window)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- // Stop the rendering, if we are rendering...
- RemoveRenderer();
- _window = window;
-
- // calculate position and size of rendered video
- int x, y;
- unsigned int winWidth, winHeight, borderwidth, depth;
- Window rootret;
- if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
- &winHeight, &borderwidth, &depth) == -1)
- {
- return -1;
- }
- _xPos = (int) (winWidth * _left);
- _yPos = (int) (winHeight * _top);
- _outWidth = (int) (winWidth * (_right - _left));
- _outHeight = (int) (winHeight * (_bottom - _top));
- if (_outWidth % 2)
- _outWidth++; // the renderer want's sizes that are multiples of two
- if (_outHeight % 2)
- _outHeight++;
-
- // Prepare rendering using the
- if (CreateLocalRenderer(_width, _height) == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t VideoX11Channel::ReleaseWindow()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- RemoveRenderer();
- if (_gc) {
- XFreeGC(_display, _gc);
- _gc = NULL;
- }
- if (_display)
- {
- XCloseDisplay(_display);
- _display = NULL;
- }
- return 0;
-}
-
-int32_t VideoX11Channel::CreateLocalRenderer(int32_t width, int32_t height)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- if (!_window || !_display)
- {
- return -1;
- }
-
- if (_prepared)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
- "Renderer already prepared, exits.");
- return -1;
- }
-
- _width = width;
- _height = height;
-
- // create shared memory image
- _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
- &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
- _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
- * _image->height), IPC_CREAT | 0777);
- _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
- if (_image->data == reinterpret_cast<char*>(-1))
- {
- return -1;
- }
- _buffer = (unsigned char*) _image->data;
- _shminfo.readOnly = False;
-
- // attach image to display
- if (!XShmAttach(_display, &_shminfo))
- {
- //printf("XShmAttach failed !\n");
- return -1;
- }
- XSync(_display, False);
-
- _prepared = true;
- return 0;
-}
-
-int32_t VideoX11Channel::RemoveRenderer()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
-
- if (!_prepared)
- {
- return 0;
- }
- _prepared = false;
-
- // Free the memory.
- XShmDetach(_display, &_shminfo);
- XDestroyImage( _image );
- _image = NULL;
- shmdt(_shminfo.shmaddr);
- _shminfo.shmaddr = NULL;
- _buffer = NULL;
- shmctl(_shminfo.shmid, IPC_RMID, 0);
- _shminfo.shmid = 0;
- return 0;
-}
-
-int32_t VideoX11Channel::GetStreamProperties(uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
-
- zOrder = 0; // no z-order support yet
- left = _left;
- top = _top;
- right = _right;
- bottom = _bottom;
-
- return 0;
-}
-
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h
deleted file mode 100644
index 6eb402e12ee..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
-
-#include <sys/shm.h>
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <X11/extensions/XShm.h>
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-#define DEFAULT_RENDER_FRAME_WIDTH 352
-#define DEFAULT_RENDER_FRAME_HEIGHT 288
-
-
-class VideoX11Channel: public VideoRenderCallback
-{
-public:
- VideoX11Channel(int32_t id);
-
- virtual ~VideoX11Channel();
-
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- int32_t FrameSizeChange(int32_t width, int32_t height,
- int32_t numberOfStreams);
- int32_t DeliverFrame(const VideoFrame& videoFrame);
- int32_t GetFrameSize(int32_t& width, int32_t& height);
- int32_t Init(Window window, float left, float top, float right,
- float bottom);
- int32_t ChangeWindow(Window window);
- int32_t
- GetStreamProperties(uint32_t& zOrder, float& left,
- float& top, float& right, float& bottom) const;
- int32_t ReleaseWindow();
-
- bool IsPrepared()
- {
- return _prepared;
- }
-
-private:
-
- int32_t
- CreateLocalRenderer(int32_t width, int32_t height);
- int32_t RemoveRenderer();
-
- //FIXME a better place for this method? the GetWidthHeight no longer
- // supported by common_video.
- int GetWidthHeight(VideoType type, int bufferSize, int& width,
- int& height);
-
- CriticalSectionWrapper& _crit;
-
- Display* _display;
- XShmSegmentInfo _shminfo;
- XImage* _image;
- Window _window;
- GC _gc;
- int32_t _width; // incoming frame width
- int32_t _height; // incoming frame height
- int32_t _outWidth; // render frame width
- int32_t _outHeight; // render frame height
- int32_t _xPos; // position within window
- int32_t _yPos;
- bool _prepared; // true if ready to use
- int32_t _dispCount;
-
- unsigned char* _buffer;
- float _top;
- float _left;
- float _right;
- float _bottom;
-
- int32_t _Id;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc
deleted file mode 100644
index 5eb4f36f95f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_x11_channel.h"
-#include "webrtc/modules/video_render/linux/video_x11_render.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoX11Render::VideoX11Render(Window window) :
- _window(window),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection())
-{
-}
-
-VideoX11Render::~VideoX11Render()
-{
- delete &_critSect;
-}
-
-int32_t VideoX11Render::Init()
-{
- CriticalSectionScoped cs(&_critSect);
-
- _streamIdToX11ChannelMap.clear();
-
- return 0;
-}
-
-int32_t VideoX11Render::ChangeWindow(Window window)
-{
- CriticalSectionScoped cs(&_critSect);
- VideoX11Channel* renderChannel = NULL;
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.begin();
-
- while (iter != _streamIdToX11ChannelMap.end())
- {
- renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->ChangeWindow(window);
- }
- iter++;
- }
-
- _window = window;
-
- return 0;
-}
-
-VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- VideoX11Channel* renderChannel = NULL;
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
-
- if (iter == _streamIdToX11ChannelMap.end())
- {
- renderChannel = new VideoX11Channel(streamId);
- if (!renderChannel)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- -1,
- "Failed to create VideoX11Channel for streamId : %d",
- streamId);
- return NULL;
- }
- renderChannel->Init(_window, left, top, right, bottom);
- _streamIdToX11ChannelMap[streamId] = renderChannel;
- }
- else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
- "Render Channel already exists for streamId: %d", streamId);
- renderChannel = iter->second;
- }
-
- return renderChannel;
-}
-
-int32_t VideoX11Render::DeleteX11RenderChannel(int32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
- if (iter != _streamIdToX11ChannelMap.end())
- {
- VideoX11Channel *renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->ReleaseWindow();
- delete renderChannel;
- renderChannel = NULL;
- }
- _streamIdToX11ChannelMap.erase(iter);
- }
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "No VideoX11Channel object exists for stream id: %d",
- streamId);
- return -1;
-}
-
-int32_t VideoX11Render::GetIncomingStreamProperties(
- int32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
- CriticalSectionScoped cs(&_critSect);
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
- if (iter != _streamIdToX11ChannelMap.end())
- {
- VideoX11Channel *renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
- }
- }
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "No VideoX11Channel object exists for stream id: %d",
- streamId);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h
deleted file mode 100644
index 23b83bd67bb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <X11/Xlib.h>
-#include <map>
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class VideoX11Channel;
-
-class VideoX11Render
-{
-
-public:
- VideoX11Render(Window window);
- ~VideoX11Render();
-
- int32_t Init();
- int32_t ChangeWindow(Window window);
-
- VideoX11Channel* CreateX11RenderChannel(int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- int32_t DeleteX11RenderChannel(int32_t streamId);
-
- int32_t GetIncomingStreamProperties(int32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom);
-
-private:
- Window _window;
- CriticalSectionWrapper& _critSect;
- std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
deleted file mode 100644
index c8e98bba674..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-//
-// cocoa_full_screen_window.h
-//
-//
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
-
-#import <Cocoa/Cocoa.h>
-//#define GRAB_ALL_SCREENS 1
-
-@interface CocoaFullScreenWindow : NSObject {
- NSWindow* _window;
-}
-
--(id)init;
--(void)grabFullScreen;
--(void)releaseFullScreen;
--(NSWindow*)window;
-
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
deleted file mode 100644
index b57223b4dfd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation CocoaFullScreenWindow
-
--(id)init{
-
- self = [super init];
- if(!self){
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__);
- return nil;
- }
-
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
- return self;
-}
-
--(void)grabFullScreen{
-
-#ifdef GRAB_ALL_SCREENS
- if(CGCaptureAllDisplays() != kCGErrorSuccess)
-#else
- if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
-#endif
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__);
- }
-
- // get the shielding window level
- int windowLevel = CGShieldingWindowLevel();
-
- // get the screen rect of main display
- NSRect screenRect = [[NSScreen mainScreen]frame];
-
- _window = [[NSWindow alloc]initWithContentRect:screenRect
- styleMask:NSBorderlessWindowMask
- backing:NSBackingStoreBuffered
- defer:NO
- screen:[NSScreen mainScreen]];
-
- [_window setLevel:windowLevel];
- [_window setBackgroundColor:[NSColor blackColor]];
- [_window makeKeyAndOrderFront:nil];
-
-}
-
--(void)releaseFullScreen
-{
- [_window orderOut:self];
-
-#ifdef GRAB_ALL_SCREENS
- if(CGReleaseAllDisplays() != kCGErrorSuccess)
-#else
- if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
-#endif
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__);
- }
-}
-
-- (NSWindow*)window
-{
- return _window;
-}
-
-- (void) dealloc
-{
- [self releaseFullScreen];
- [super dealloc];
-}
-
-
-
-@end
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h
deleted file mode 100644
index 15a8108dec7..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-//
-// cocoa_render_view.h
-//
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
-
-#import <Cocoa/Cocoa.h>
-#import <OpenGL/gl.h>
-#import <OpenGL/glu.h>
-#import <OpenGL/OpenGL.h>
-
-@interface CocoaRenderView : NSOpenGLView {
- NSOpenGLContext* _nsOpenGLContext;
-}
-
--(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
--(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
--(NSOpenGLContext*)nsOpenGLContext;
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm
deleted file mode 100644
index 4631ff31a4f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Cocoa/Cocoa.h>
-#import <AppKit/AppKit.h>
-
-#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation CocoaRenderView
-
--(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
-
- self = [super initWithFrame:[self frame] pixelFormat:fmt];
- if (self == nil){
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
- }
-
-
- _nsOpenGLContext = [self openGLContext];
-
-}
-
--(NSOpenGLContext*)nsOpenGLContext {
- return _nsOpenGLContext;
-}
-
--(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
-
- NSRect screenRect = [[NSScreen mainScreen]frame];
-// [_windowRef setFrame:screenRect];
-// [_windowRef setBounds:screenRect];
- self = [super initWithFrame:screenRect pixelFormat:fmt];
- if (self == nil){
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
- }
-
- _nsOpenGLContext = [self openGLContext];
-
-}
-
-@end
-
-
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc b/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc
deleted file mode 100644
index 3243563b2bf..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc
+++ /dev/null
@@ -1,1987 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(CARBON_RENDERING)
-
-#include "webrtc/modules/video_render/mac/video_render_agl.h"
-
-// includes
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-/*
- *
- * VideoChannelAGL
- *
- */
-
-#pragma mark VideoChannelAGL constructor
-
-VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
- _aglContext( aglContext),
- _id( iId),
- _owner( owner),
- _width( 0),
- _height( 0),
- _stretchedWidth( 0),
- _stretchedHeight( 0),
- _startWidth( 0.0f),
- _startHeight( 0.0f),
- _stopWidth( 0.0f),
- _stopHeight( 0.0f),
- _xOldWidth( 0),
- _yOldHeight( 0),
- _oldStretchedHeight(0),
- _oldStretchedWidth( 0),
- _buffer( 0),
- _bufferSize( 0),
- _incomingBufferSize(0),
- _bufferIsUpdated( false),
- _sizeInitialized( false),
- _numberOfStreams( 0),
- _bVideoSizeStartedChanging(false),
- _pixelFormat( GL_RGBA),
- _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
- _texture( 0)
-
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
-}
-
-VideoChannelAGL::~VideoChannelAGL()
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
- if (_buffer)
- {
- delete [] _buffer;
- _buffer = NULL;
- }
-
- aglSetCurrentContext(_aglContext);
-
- if (_texture != 0)
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-}
-
-int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
- VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
- if (_width != videoFrame.width() ||
- _height != videoFrame.height()) {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSize
- Change returned an error", __FUNCTION__, __LINE__);
- _owner->UnlockAGLCntx();
- return -1;
- }
- }
-
- _owner->UnlockAGLCntx();
- return DeliverFrame(videoFrame);
-}
-
-int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
-{
- _owner->LockAGLCntx();
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
-{
-
- _owner->LockAGLCntx();
- _stretchedHeight = stretchHeight;
- _stretchedWidth = stretchWidth;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- // We'll get a new frame size from VideoAPI, prepare the buffer
-
- _owner->LockAGLCntx();
-
- if (width == _width && _height == height)
- {
- // We already have a correct buffer size
- _numberOfStreams = numberOfStreams;
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- _width = width;
- _height = height;
-
- // Delete the old buffer, create a new one with correct size.
- if (_buffer)
- {
- delete [] _buffer;
- _bufferSize = 0;
- }
-
- _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
- _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
- _buffer = new unsigned char [_bufferSize];
- memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
-
- if (aglSetCurrentContext(_aglContext) == false)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Delete a possible old texture
- if (_texture != 0)
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-
- // Create a new texture
- glGenTextures(1, (GLuint *) &_texture);
-
- GLenum glErr = glGetError();
-
- if (glErr != GL_NO_ERROR)
- {
- }
-
- // Do the setup for both textures
- // Note: we setup two textures even if we're not running full screen
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- // Set texture parameters
- glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
- //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
-
- glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
-
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
-
- // Maximum width/height for a texture
- GLint texSize;
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
-
- if (texSize < _width || texSize < _height)
- {
- // Image too big for memory
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Set up th texture type and size
- glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
- 0, // level
- GL_RGBA, // internal format
- _width, // width
- _height, // height
- 0, // border 0/1 = off/on
- _pixelFormat, // format, GL_BGRA
- _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
- _buffer); // pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-// Called from video engine when a new frame should be rendered.
-int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if (_texture == 0) {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
- _incomingBufferSize) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Setting stride = width.
- int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
- if (rgbret < 0) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- aglSetCurrentContext(_aglContext);
-
- // Put the new frame into the graphic card texture.
- // Make sure this texture is the active one
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Copy buffer to texture
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- if (glGetError() != GL_NO_ERROR) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
- _owner->UnlockAGLCntx();
-
- return 0;
-}
-
-int VideoChannelAGL::RenderOffScreenBuffer()
-{
-
- _owner->LockAGLCntx();
-
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- GLfloat xStart = 2.0f * _startWidth - 1.0f;
- GLfloat xStop = 2.0f * _stopWidth - 1.0f;
- GLfloat yStart = 1.0f - 2.0f * _stopHeight;
- GLfloat yStop = 1.0f - 2.0f * _startHeight;
-
- aglSetCurrentContext(_aglContext);
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
- {
- glViewport(0, 0, _stretchedWidth, _stretchedHeight);
- }
- _oldStretchedHeight = _stretchedHeight;
- _oldStretchedWidth = _stretchedWidth;
-
- // Now really put the texture into the framebuffer
- glLoadIdentity();
-
- glEnable(GL_TEXTURE_RECTANGLE_EXT);
-
- glBegin(GL_POLYGON);
- {
- glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
- glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
- glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
- glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
- }
- glEnd();
-
- glDisable(GL_TEXTURE_RECTANGLE_EXT);
-
- _bufferIsUpdated = false;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::IsUpdated(bool& isUpdated)
-{
- _owner->LockAGLCntx();
- isUpdated = _bufferIsUpdated;
- _owner->UnlockAGLCntx();
-
- return 0;
-}
-
-int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- _owner->LockAGLCntx();
-
- _startWidth = startWidth;
- _stopWidth = stopWidth;
- _startHeight = startHeight;
- _stopHeight = stopHeight;
-
- int oldWidth = _width;
- int oldHeight = _height;
- int oldNumberOfStreams = _numberOfStreams;
-
- _width = 0;
- _height = 0;
-
- int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
-
- _owner->UnlockAGLCntx();
-
- return retVal;
-}
-
-int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
-{
- return -1;
-}
-
-#pragma mark VideoRenderAGL WindowRef constructor
-
-VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
-_hiviewRef( 0),
-_windowRef( windowRef),
-_fullScreen( fullscreen),
-_id( iId),
-_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent( 0),
-_isHIViewRef( false),
-_aglContext( 0),
-_windowWidth( 0),
-_windowHeight( 0),
-_lastWindowWidth( -1),
-_lastWindowHeight( -1),
-_lastHiViewWidth( -1),
-_lastHiViewHeight( -1),
-_currentParentWindowHeight( 0),
-_currentParentWindowWidth( 0),
-_currentParentWindowBounds( ),
-_windowHasResized( false),
-_lastParentWindowBounds( ),
-_currentHIViewBounds( ),
-_lastHIViewBounds( ),
-_windowRect( ),
-_aglChannels( ),
-_zOrderToChannel( ),
-_hiviewEventHandlerRef( NULL),
-_windowEventHandlerRef( NULL),
-_currentViewBounds( ),
-_lastViewBounds( ),
-_renderingIsPaused( false),
-
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
-
- _screenUpdateThread.reset(
- new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
- _screenUpdateEvent = EventWrapper::Create();
-
- if(!IsValidWindowPtr(_windowRef))
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
- }
- else
- {
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
- }
-
- GetWindowRect(_windowRect);
-
- _lastViewBounds.origin.x = 0;
- _lastViewBounds.origin.y = 0;
- _lastViewBounds.size.width = 0;
- _lastViewBounds.size.height = 0;
-
-}
-
-// this is a static function. It has been registered (in class constructor) to be called on various window redrawing or resizing.
-// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
-#pragma mark WindowRef Event Handler
-pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
- EventRef theEvent,
- void* userData)
-{
- WindowRef windowRef = NULL;
-
- int eventType = GetEventKind(theEvent);
-
- // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
- GetEventParameter (theEvent,
- kEventParamDirectObject,
- typeWindowRef,
- NULL,
- sizeof (WindowRef),
- NULL,
- &windowRef);
-
- VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
-
- bool updateUI = true;
- if(kEventWindowBoundsChanged == eventType)
- {
- }
- else if(kEventWindowBoundsChanging == eventType)
- {
- }
- else if(kEventWindowZoomed == eventType)
- {
- }
- else if(kEventWindowExpanding == eventType)
- {
- }
- else if(kEventWindowExpanded == eventType)
- {
- }
- else if(kEventWindowClickResizeRgn == eventType)
- {
- }
- else if(kEventWindowClickDragRgn == eventType)
- {
- }
- else
- {
- updateUI = false;
- }
-
- if(true == updateUI)
- {
- obj->ParentWindowResized(windowRef);
- obj->UpdateClipping();
- obj->RenderOffScreenBuffers();
- }
-
- return noErr;
-}
-
-#pragma mark VideoRenderAGL HIViewRef constructor
-
-VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
-_hiviewRef( windowRef),
-_windowRef( 0),
-_fullScreen( fullscreen),
-_id( iId),
-_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent( 0),
-_isHIViewRef( false),
-_aglContext( 0),
-_windowWidth( 0),
-_windowHeight( 0),
-_lastWindowWidth( -1),
-_lastWindowHeight( -1),
-_lastHiViewWidth( -1),
-_lastHiViewHeight( -1),
-_currentParentWindowHeight( 0),
-_currentParentWindowWidth( 0),
-_currentParentWindowBounds( ),
-_windowHasResized( false),
-_lastParentWindowBounds( ),
-_currentHIViewBounds( ),
-_lastHIViewBounds( ),
-_windowRect( ),
-_aglChannels( ),
-_zOrderToChannel( ),
-_hiviewEventHandlerRef( NULL),
-_windowEventHandlerRef( NULL),
-_currentViewBounds( ),
-_lastViewBounds( ),
-_renderingIsPaused( false),
-{
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
- // _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
-
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
- _screenUpdateEvent = EventWrapper::Create();
-
- GetWindowRect(_windowRect);
-
- _lastViewBounds.origin.x = 0;
- _lastViewBounds.origin.y = 0;
- _lastViewBounds.size.width = 0;
- _lastViewBounds.size.height = 0;
-
-#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
- // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
- // The event handler looks for window resize events and adjusts the offset of the controls.
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
-
-
- static const EventTypeSpec windowEventTypes[] =
- {
- kEventClassWindow, kEventWindowBoundsChanged,
- kEventClassWindow, kEventWindowBoundsChanging,
- kEventClassWindow, kEventWindowZoomed,
- kEventClassWindow, kEventWindowExpanded,
- kEventClassWindow, kEventWindowClickResizeRgn,
- kEventClassWindow, kEventWindowClickDragRgn
- };
-
- WindowRef parentWindow = HIViewGetWindow(windowRef);
-
- InstallWindowEventHandler (parentWindow,
- NewEventHandlerUPP (sHandleWindowResized),
- GetEventTypeCount(windowEventTypes),
- windowEventTypes,
- (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
- &_windowEventHandlerRef);
-
-#endif
-
-#ifdef NEW_HIVIEW_EVENT_HANDLER
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
-
- static const EventTypeSpec hiviewEventTypes[] =
- {
- kEventClassControl, kEventControlBoundsChanged,
- kEventClassControl, kEventControlDraw
- // kEventControlDragLeave
- // kEventControlDragReceive
- // kEventControlGetFocusPart
- // kEventControlApplyBackground
- // kEventControlDraw
- // kEventControlHit
-
- };
-
- HIViewInstallEventHandler(_hiviewRef,
- NewEventHandlerUPP(sHandleHiViewResized),
- GetEventTypeCount(hiviewEventTypes),
- hiviewEventTypes,
- (void *) this,
- &_hiviewEventHandlerRef);
-
-#endif
-}
-
-// this is a static function. It has been registered (in constructor) to be called on various window redrawing or resizing.
-// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
-#pragma mark HIViewRef Event Handler
-pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
-{
- //static int callbackCounter = 1;
- HIViewRef hiviewRef = NULL;
-
- // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
- int eventType = GetEventKind(theEvent);
- OSStatus status = noErr;
- status = GetEventParameter (theEvent,
- kEventParamDirectObject,
- typeControlRef,
- NULL,
- sizeof (ControlRef),
- NULL,
- &hiviewRef);
-
- VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
- WindowRef parentWindow = HIViewGetWindow(hiviewRef);
- bool updateUI = true;
-
- if(kEventControlBoundsChanged == eventType)
- {
- }
- else if(kEventControlDraw == eventType)
- {
- }
- else
- {
- updateUI = false;
- }
-
- if(true == updateUI)
- {
- obj->ParentWindowResized(parentWindow);
- obj->UpdateClipping();
- obj->RenderOffScreenBuffers();
- }
-
- return status;
-}
-
-VideoRenderAGL::~VideoRenderAGL()
-{
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
-
-
-#ifdef USE_EVENT_HANDLERS
- // remove event handlers
- OSStatus status;
- if(_isHIViewRef)
- {
- status = RemoveEventHandler(_hiviewEventHandlerRef);
- }
- else
- {
- status = RemoveEventHandler(_windowEventHandlerRef);
- }
- if(noErr != status)
- {
- if(_isHIViewRef)
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
- }
- else
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
- }
- }
-
-#endif
-
- OSStatus status;
-#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
- if(_windowEventHandlerRef)
- {
- status = RemoveEventHandler(_windowEventHandlerRef);
- if(status != noErr)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
- }
- }
-#endif
-
-#ifdef NEW_HIVIEW_EVENT_HANDLER
- if(_hiviewEventHandlerRef)
- {
- status = RemoveEventHandler(_hiviewEventHandlerRef);
- if(status != noErr)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
- }
- }
-#endif
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
- }
-
- if (_aglContext != 0)
- {
- aglSetCurrentContext(_aglContext);
- aglDestroyContext(_aglContext);
- _aglContext = 0;
- }
-
- // Delete all channels
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
- while (it!= _aglChannels.end())
- {
- delete it->second;
- _aglChannels.erase(it);
- it = _aglChannels.begin();
- }
- _aglChannels.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while(zIt != _zOrderToChannel.end())
- {
- _zOrderToChannel.erase(zIt);
- zIt = _zOrderToChannel.begin();
- }
- _zOrderToChannel.clear();
-
- //delete _renderCritSec;
-
-
-}
-
-int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
-{
- aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
- return 0;
-}
-
-int VideoRenderAGL::Init()
-{
- LockAGLCntx();
-
- // Start rendering thread...
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
- return -1;
- }
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
-
- // Create mixing textures
- if (CreateMixingContext() == -1)
- {
- //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- LockAGLCntx();
-
- //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
-
- if (HasChannel(channel))
- {
- //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
- UnlockAGLCntx();k
- return NULL;
- }
-
- if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
- {
- // There are already one channel using this zOrder
- // TODO: Allow multiple channels with same zOrder
- }
-
- VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
-
- if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- if (newAGLChannel)
- {
- delete newAGLChannel;
- newAGLChannel = NULL;
- }
- //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
- //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return NULL;
- }
-k
- _aglChannels[channel] = newAGLChannel;
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
-
- UnlockAGLCntx();
- return newAGLChannel;
-}
-
-int VideoRenderAGL::DeleteAllAGLChannels()
-{
- CriticalSectionScoped cs(&_renderCritSec);
-
- //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
- //int i = 0 ;
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- VideoChannelAGL* channel = it->second;
- if (channel)
- delete channel;
-
- _aglChannels.erase(it);
- it = _aglChannels.begin();
- }
- _aglChannels.clear();
- return 0;
-}
-
-int VideoRenderAGL::DeleteAGLChannel(int channel)
-{
- CriticalSectionScoped cs(&_renderCritSec);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
-
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.find(channel);
- if (it != _aglChannels.end())
- {
- delete it->second;
- _aglChannels.erase(it);
- }
- else
- {
- //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
- return -1;
- }
-
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while( zIt != _zOrderToChannel.end())
- {
- if (zIt->second == channel)
- {
- _zOrderToChannel.erase(zIt);
- break;
- }
- zIt++;// = _zOrderToChannel.begin();
- }
-
- return 0;
-}
-
-int VideoRenderAGL::StopThread()
-{
- CriticalSectionScoped cs(&_renderCritSec);
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _renderCritSec.Leave();
- tmpPtr->Stop();
- delete tmpPtr;
- _renderCritSec.Enter();
- }
-
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
-
- return 0;
-}
-
-bool VideoRenderAGL::IsFullScreen()
-{
- CriticalSectionScoped cs(&_renderCritSec);
- return _fullScreen;
-}
-
-bool VideoRenderAGL::HasChannels()
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
-
- if (_aglChannels.begin() != _aglChannels.end())
- {
- return true;
- }
-
- return false;
-}
-
-bool VideoRenderAGL::HasChannel(int channel)
-{
- CriticalSectionScoped cs(&_renderCritSec);
-
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
- if (it != _aglChannels.end())
- {
- return true;
- }
-
- return false;
-}
-
-int VideoRenderAGL::GetChannels(std::list<int>& channelList)
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- channelList.push_back(it->first);
- it++;
- }
-
- return 0;
-}
-
-VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
-
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
-
- if (it != _aglChannels.end())
- {
- VideoChannelAGL* aglChannel = it->second;
- if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- return NULL;
- }
-
- std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
- while(it != _zOrderToChannel.end())
- {
- if (it->second == channel)
- {
- if (it->first != zOrder)
- {
- _zOrderToChannel.erase(it);
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
- }
- break;
- }
- it++;
- }
- return aglChannel;
- }
-
- return NULL;
-}
-
-bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderAGL::ScreenUpdateProcess()
-{
- _screenUpdateEvent->Wait(100);
-
- LockAGLCntx();
-
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- return false;
- }
-
- if (aglSetCurrentContext(_aglContext) == GL_FALSE)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- // We have a new window size, update the context.
- if (aglUpdateContext(_aglContext) == GL_FALSE)
- {
- UnlockAGLCntx();
- return true;
- }
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- }
-
- // this section will poll to see if the window size has changed
- // this is causing problem w/invalid windowRef
- // this code has been modified and exists now in the window event handler
-#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
- if (_isHIViewRef)
- {
-
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
- WindowRef window = HIViewGetWindow(_hiviewRef);
-
- if(FALSE == IsValidWindowPtr(window))
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
- if (window == NULL)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
-
- if(FALSE == MacIsWindowVisible(window))
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
-
- HIRect viewBounds; // Placement and size for HIView
- int windowWidth = 0; // Parent window width
- int windowHeight = 0; // Parent window height
-
- // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittentaly if the OS decides it needs to push it into the back for a moment.
- // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seems not to crash
- Rect contentBounds =
- { 0, 0, 0, 0}; // The bounds for the parent window
-
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &contentBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
-#endif
-
- Rect globalBounds =
- { 0, 0, 0, 0}; // The bounds for the parent window
- globalBounds.top = contentBounds.top;
- globalBounds.right = contentBounds.right;
- globalBounds.bottom = contentBounds.bottom;
- globalBounds.left = contentBounds.left;
-
- windowHeight = globalBounds.bottom - globalBounds.top;
- windowWidth = globalBounds.right - globalBounds.left;
-
- // Get the size of the HIViewRef
- HIViewGetBounds(_hiviewRef, &viewBounds);
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- // Check if this is the first call..
- if (_lastWindowHeight == -1 &&
- _lastWindowWidth == -1)
- {
- _lastWindowWidth = windowWidth;
- _lastWindowHeight = windowHeight;
-
- _lastViewBounds.origin.x = viewBounds.origin.x;
- _lastViewBounds.origin.y = viewBounds.origin.y;
- _lastViewBounds.size.width = viewBounds.size.width;
- _lastViewBounds.size.height = viewBounds.size.height;
- }
- sfasdfasdf
-
- bool resized = false;
-
- // Check if parent window size has changed
- if (windowHeight != _lastWindowHeight ||
- windowWidth != _lastWindowWidth)
- {
- resized = true;
- }
-
- // Check if the HIView has new size or is moved in the parent window
- if (_lastViewBounds.origin.x != viewBounds.origin.x ||
- _lastViewBounds.origin.y != viewBounds.origin.y ||
- _lastViewBounds.size.width != viewBounds.size.width ||
- _lastViewBounds.size.height != viewBounds.size.height)
- {
- // The HiView is resized or has moved.
- resized = true;
- }
-
- if (resized)
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
-
- // Calculate offset between the windows
- // {x, y, widht, height}, x,y = lower left corner
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
- contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
-
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- // We need to change the viewport too if the HIView size has changed
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-
- }
- _lastWindowWidth = windowWidth;
- _lastWindowHeight = windowHeight;
-
- _lastViewBounds.origin.x = viewBounds.origin.x;
- _lastViewBounds.origin.y = viewBounds.origin.y;
- _lastViewBounds.size.width = viewBounds.size.width;
- _lastViewBounds.size.height = viewBounds.size.height;
-
- }
-#endif
- if (_fullScreen)
- {
- // TODO
- // We use double buffers, must always update
- //RenderOffScreenBuffersToBackBuffer();
- }
- else
- {
- // Check if there are any updated buffers
- bool updated = false;
-
- // TODO: check if window size is updated!
- // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
- while (it != _aglChannels.end())
- {
-
- VideoChannelAGL* aglChannel = it->second;
- aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
- aglChannel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
-
- if (updated)
- {
- // At least on buffers is updated, we need to repaint the texture
- if (RenderOffScreenBuffers() != -1)
- {
- // MF
- //SwapAndDisplayBuffers();
- }
- else
- {
- // Error updating the mixing texture, don't swap.
- }
- }
- }
-
- UnlockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
- return true;
-}
-
-void VideoRenderAGL::ParentWindowResized(WindowRef window)
-{
- //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
-
- LockAGLCntx();
-k
- // set flag
- _windowHasResized = false;
-
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
- UnlockAGLCntx();
- return;
- }
-
- if(FALSE == IsValidWindowPtr(window))
- {
- //WEBRTC_LOG(kTraceError, "invalid windowRef");
- UnlockAGLCntx();
- return;
- }
-
- if (window == NULL)
- {
- //WEBRTC_LOG(kTraceError, "windowRef = NULL");
- UnlockAGLCntx();
- return;
- }
-
- if(FALSE == MacIsWindowVisible(window))
- {
- //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
- UnlockAGLCntx();
- return;
- }
-
- Rect contentBounds =
- { 0, 0, 0, 0};
-
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &contentBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
-#endif
-
- //WEBRTC_LOG(kTraceDebug, "%s contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
-
- // update global vars
- _currentParentWindowBounds.top = contentBounds.top;
- _currentParentWindowBounds.left = contentBounds.left;
- _currentParentWindowBounds.bottom = contentBounds.bottom;
- _currentParentWindowBounds.right = contentBounds.right;
-
- _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
- _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
-
- _windowHasResized = true;
-
- // ********* update AGL offsets
- HIRect viewBounds;
- HIViewGetBounds(_hiviewRef, &viewBounds);
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
- //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
- //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
-
- aglSetCurrentContext(_aglContext);
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- // We need to change the viewport too if the HIView size has changed
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-
- UnlockAGLCntx();
-
- return;
-}
-
-int VideoRenderAGL::CreateMixingContext()
-{
-
- LockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
-
- // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
- // a hardware renderer is used and not a software renderer.
-
- GLint attributes[] =
- {
- AGL_DOUBLEBUFFER,
- AGL_WINDOW,
- AGL_RGBA,
- AGL_NO_RECOVERY,
- AGL_ACCELERATED,
- AGL_RED_SIZE, 8,
- AGL_GREEN_SIZE, 8,
- AGL_BLUE_SIZE, 8,
- AGL_ALPHA_SIZE, 8,
- AGL_DEPTH_SIZE, 24,
- AGL_NONE,
- };
-
- AGLPixelFormat aglPixelFormat;
-
- // ***** Set up the OpenGL Context *****
-
- // Get a pixel format for the attributes above
- aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
- if (NULL == aglPixelFormat)
- {
- //WEBRTC_LOG(kTraceError, "Could not create pixel format");
- UnlockAGLCntx();
- return -1;
- }
-
- // Create an AGL context
- _aglContext = aglCreateContext(aglPixelFormat, NULL);
- if (_aglContext == NULL)
- {
- //WEBRTC_LOG(kTraceError, "Could no create AGL context");
- UnlockAGLCntx();
- return -1;
- }
-
- // Release the pixel format memory
- aglDestroyPixelFormat(aglPixelFormat);
-
- // Set the current AGL context for the rest of the settings
- if (aglSetCurrentContext(_aglContext) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- if (_isHIViewRef)
- {
- //---------------------------
- // BEGIN: new test code
-#if 0
- // Don't use this one!
- // There seems to be an OS X bug that can't handle
- // movements and resizing of the parent window
- // and or the HIView
- if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-#else
-
- // Get the parent window for this control
- WindowRef window = GetControlOwner(_hiviewRef);
-
- Rect globalBounds =
- { 0,0,0,0}; // The bounds for the parent window
- HIRect viewBounds; // Placemnt in the parent window and size.
- int windowHeight = 0;
-
- // Rect titleBounds = {0,0,0,0};
- // GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
- // _titleBarHeight = titleBounds.top - titleBounds.bottom;
- // if(0 == _titleBarHeight)
- // {
- // //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
- // //return -1;
- // }
-
-
- // Get the bounds for the parent window
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &globalBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
-#endif
- windowHeight = globalBounds.bottom - globalBounds.top;
-
- // Get the bounds for the HIView
- HIViewGetBounds(_hiviewRef, &viewBounds);
-
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
-
- //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
-
-
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- GLint surfaceOrder = 1; // 1: above window, -1 below.
- //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
- aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
-
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-#endif
-
- }
- else
- {
- if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
- {
- //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
- }
-
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
-
- // opaque surface
- int surfaceOpacity = 1;
- if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- // 1 -> sync to screen rat, slow...
- //int swapInterval = 0; // 0 don't sync with vertical trace
- int swapInterval = 0; // 1 sync with vertical trace
- if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- // Update the rect with the current size
- if (GetWindowRect(_windowRect) == -1)
- {
- //WEBRTC_LOG(kTraceError, "Could not get window size");
- UnlockAGLCntx();
- return -1;
- }
-
- // Disable not needed functionality to increase performance
- glDisable(GL_DITHER);
- glDisable(GL_ALPHA_TEST);
- glDisable(GL_STENCIL_TEST);
- glDisable(GL_FOG);
- glDisable(GL_TEXTURE_2D);
- glPixelZoom(1.0, 1.0);
-
- glDisable(GL_BLEND);
- glDisable(GL_DEPTH_TEST);
- glDepthMask(GL_FALSE);
- glDisable(GL_CULL_FACE);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- GLenum glErr = glGetError();
-
- if (glErr)
- {
- }
-
- UpdateClipping();
-
- //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::RenderOffScreenBuffers()
-{
- LockAGLCntx();
-
- // Get the current window size, it might have changed since last render.
- if (GetWindowRect(_windowRect) == -1)
- {
- //WEBRTC_LOG(kTraceError, "Could not get window rect");
- UnlockAGLCntx();
- return -1;
- }
-
- if (aglSetCurrentContext(_aglContext) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
- UnlockAGLCntx();
- return -1;
- }
-
- // HERE - onl if updated!
- glClear(GL_COLOR_BUFFER_BIT);
-
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- int channelId = rIt->second;
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
-
- VideoChannelAGL* aglChannel = it->second;
-
- aglChannel->RenderOffScreenBuffer();
- }
-
- SwapAndDisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::SwapAndDisplayBuffers()
-{
-
- LockAGLCntx();
- if (_fullScreen)
- {
- // TODO:
- // Swap front and back buffers, rendering taking care of in the same call
- //aglSwapBuffers(_aglContext);
- // Update buffer index to the idx for the next rendering!
- //_textureIdx = (_textureIdx + 1) & 1;
- }
- else
- {
- // Single buffer rendering, only update context.
- glFlush();
- aglSwapBuffers(_aglContext);
- HIViewSetNeedsDisplay(_hiviewRef, true);
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::GetWindowRect(Rect& rect)
-{
-
- LockAGLCntx();
-
- if (_isHIViewRef)
- {
- if (_hiviewRef)
- {
- HIRect HIViewRect1;
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- rect.top = 0;
- rect.left = 0;
- rect.right = 0;
- rect.bottom = 0;
- //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
- UnlockAGLCntx();
- }
- HIViewGetBounds(_hiviewRef,&HIViewRect1);
- HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
- if(HIViewRect1.origin.x < 0)
- {
- rect.top = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
- }
- else
- {
- rect.top = HIViewRect1.origin.x;
- }
-
- if(HIViewRect1.origin.y < 0)
- {
- rect.left = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
- }
- else
- {
- rect.left = HIViewRect1.origin.y;
- }
-
- if(HIViewRect1.size.width < 0)
- {
- rect.right = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
- }
- else
- {
- rect.right = HIViewRect1.size.width;
- }
-
- if(HIViewRect1.size.height < 0)
- {
- rect.bottom = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
- }
- else
- {
- rect.bottom = HIViewRect1.size.height;
- }
-
- ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
- UnlockAGLCntx();
- }
- else
- {
- //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
- UnlockAGLCntx();
- }
- }
- else
- {
- if (_windowRef)
- {
- GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
- UnlockAGLCntx();
- }
- else
- {
- //WEBRTC_LOG(kTraceError, "No WindowRef");
- UnlockAGLCntx();
- }
- }
-}
-
-int VideoRenderAGL::UpdateClipping()
-{
- //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
- LockAGLCntx();
-
- if(_isHIViewRef)
- {
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
- UnlockAGLCntx();
- return -1;
- }
-
- RgnHandle visibleRgn = NewRgn();
- SetEmptyRgn (visibleRgn);
-
- if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
- {
- }
-
- if(GL_FALSE == aglSetCurrentContext(_aglContext))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
- }
-
- if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
- }
-
- if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
- }
-
- DisposeRgn(visibleRgn);
- }
- else
- {
- //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
- }
-
- //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
- UnlockAGLCntx();
- return true;
-}
-
-int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
-{
-
- // LockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
- OSStatus osStatus = 0;
- OSErr osErr = 0;
-
- RgnHandle tempRgn = NewRgn();
- if (IsControlVisible(control))
- {
- RgnHandle childRgn = NewRgn();
- WindowRef window = GetControlOwner(control);
- ControlRef rootControl;
- GetRootControl(window, &rootControl); // 'wvnc'
- ControlRef masterControl;
- osStatus = GetSuperControl(rootControl, &masterControl);
- // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
-
- if (masterControl != NULL)
- {
- CheckValidRegion(visibleRgn);
- // init visibleRgn with region of 'wvnc'
- osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
- //GetSuperControl(rootControl, &rootControl);
- ControlRef tempControl = control, lastControl = 0;
- while (tempControl != masterControl) // current control != master
-
- {
- CheckValidRegion(tempRgn);
-
- // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
- ControlRef subControl;
-
- osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
- CheckValidRegion(tempRgn);
-
- osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
- // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
- CheckValidRegion(tempRgn);
-
- SectRgn(tempRgn, visibleRgn, visibleRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(visibleRgn);
- if (EmptyRgn(visibleRgn)) // if the region is empty, bail
- break;
-
- if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
-
- {
- UInt16 numChildren;
- osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
- // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
-
- // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
- for (int i = 0; i < numChildren; i++)
- {
- osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
- // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
- if ( subControl == lastControl ) // break because of zorder
-
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
- break;
- }
-
- if (!IsControlVisible(subControl)) // dont' clip invisible controls
-
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
- continue;
- }
-
- if(!subControl) continue;
-
- osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
- CheckValidRegion(tempRgn);
- if(osStatus != 0)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
- continue;
- }
- if(!tempRgn)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
- continue;
- }
-
- osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
- CheckValidRegion(tempRgn);
- // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
- if(osStatus != 0)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
- continue;
- }
- if(!rootControl)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
- continue;
- }
-
- UnionRgn(tempRgn, childRgn, childRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(childRgn);
- CheckValidRegion(visibleRgn);
- if(!childRgn)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
- continue;
- }
-
- } // next child control
- }
- lastControl = tempControl;
- GetSuperControl(tempControl, &subControl);
- tempControl = subControl;
- }
-
- DiffRgn(visibleRgn, childRgn, visibleRgn);
- CheckValidRegion(visibleRgn);
- CheckValidRegion(childRgn);
- DisposeRgn(childRgn);
- }
- else
- {
- CopyRgn(tempRgn, visibleRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(visibleRgn);
- }
- DisposeRgn(tempRgn);
- }
-
- //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
- //_aglCritPtr->Leave();
- return 0;
-}
-
-bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
-{
-
- Handle hndSize = (Handle)rHandle;
- long size = GetHandleSize(hndSize);
- if(0 == size)
- {
-
- OSErr memErr = MemError();
- if(noErr != memErr)
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
- }
- else
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
- }
-
- }
- else
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
- }
-
- if(false == IsValidRgnHandle(rHandle))
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
- assert(false);
- }
-
- int err = QDError();
- switch(err)
- {
- case 0:
- break;
- case -147:
- //WEBRTC_LOG(kTraceError, "ERROR region too big");
- assert(false);
- break;
-
- case -149:
- //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
- assert(false);
- break;
-
- default:
- //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
- assert(false);
- break;
- }
-
- return true;
-}
-
-int VideoRenderAGL::ChangeWindow(void* newWindowRef)
-{
-
- LockAGLCntx();
-
- UnlockAGLCntx();
- return -1;
-}
-
-int32_t VideoRenderAGL::StartRender()
-{
-
- LockAGLCntx();
- const unsigned int MONITOR_FREQ = 60;
- if(TRUE == _renderingIsPaused)
- {
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
-
- // we already have the thread. Most likely StopRender() was called and they were paused
- if(FALSE == _screenUpdateThread->Start())
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
- if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- return 0;
- }
-
- _screenUpdateThread.reset(
- new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
- _screenUpdateEvent = EventWrapper::Create();
-
- if (!_screenUpdateThread)
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
- _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
-
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
-
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::StopRender()
-{
- LockAGLCntx();
-
- if(!_screenUpdateThread || !_screenUpdateEvent)
- {
- _renderingIsPaused = TRUE;
- UnlockAGLCntx();
- return 0;
- }
-
- if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
- {
- _renderingIsPaused = FALSE;
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- _renderingIsPaused = TRUE;
-
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
-{
-
- LockAGLCntx();
-
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- VideoChannelAGL* channel = it->second;
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
- delete channel;
- it++;
- }
- _aglChannels.clear();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
-
- LockAGLCntx();
- UnlockAGLCntx();
- return -1;
-
-}
-
-void VideoRenderAGL::LockAGLCntx()
-{
- _renderCritSec.Enter();
-}
-void VideoRenderAGL::UnlockAGLCntx()
-{
- _renderCritSec.Leave();
-}
-
-} // namespace webrtc
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h
deleted file mode 100644
index c0a60597e98..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(CARBON_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
-#define NEW_HIVIEW_EVENT_HANDLER 1
-#define USE_STRUCT_RGN
-
-#include <AGL/agl.h>
-#include <Carbon/Carbon.h>
-#include <OpenGL/OpenGL.h>
-#include <OpenGL/glext.h>
-#include <OpenGL/glu.h>
-#include <list>
-#include <map>
-#include <memory>
-
-class VideoRenderAGL;
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventWrapper;
-
-class VideoChannelAGL : public VideoRenderCallback {
- public:
-
- VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
- virtual ~VideoChannelAGL();
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
- virtual int DeliverFrame(const VideoFrame& videoFrame);
- virtual int UpdateSize(int width, int height);
- int SetStreamSettings(int streamId, float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int SetStreamCropSettings(int streamId, float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int RenderOffScreenBuffer();
- int IsUpdated(bool& isUpdated);
- virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
- virtual int32_t RenderFrame(const uint32_t streamId, VideoFrame& videoFrame);
-
- private:
-
- AGLContext _aglContext;
- int _id;
- VideoRenderAGL* _owner;
- int _width;
- int _height;
- int _stretchedWidth;
- int _stretchedHeight;
- float _startHeight;
- float _startWidth;
- float _stopWidth;
- float _stopHeight;
- int _xOldWidth;
- int _yOldHeight;
- int _oldStretchedHeight;
- int _oldStretchedWidth;
- unsigned char* _buffer;
- size_t _bufferSize;
- size_t _incomingBufferSize;
- bool _bufferIsUpdated;
- bool _sizeInitialized;
- int _numberOfStreams;
- bool _bVideoSizeStartedChanging;
- GLenum _pixelFormat;
- GLenum _pixelDataType;
- unsigned int _texture;
-};
-
-class VideoRenderAGL {
- public:
- VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
- VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
- ~VideoRenderAGL();
-
- int Init();
- VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth,
- float startHeight, float stopWidth,
- float stopHeight);
- VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder,
- float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int DeleteAGLChannel(int channel);
- int DeleteAllAGLChannels();
- int StopThread();
- bool IsFullScreen();
- bool HasChannels();
- bool HasChannel(int channel);
- int GetChannels(std::list<int>& channelList);
- void LockAGLCntx();
- void UnlockAGLCntx();
-
- static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
-
- // ********** new module functions ************ //
- int ChangeWindow(void* newWindowRef);
- int32_t StartRender();
- int32_t StopRender();
- int32_t DeleteAGLChannel(const uint32_t streamID);
- int32_t GetChannelProperties(const uint16_t streamId, uint32_t& zOrder,
- float& left, float& top, float& right,
- float& bottom);
-
- protected:
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect);
-
- private:
- int CreateMixingContext();
- int RenderOffScreenBuffers();
- int SwapAndDisplayBuffers();
- int UpdateClipping();
- int CalculateVisibleRegion(ControlRef control, RgnHandle& visibleRgn,
- bool clipChildren);
- bool CheckValidRegion(RgnHandle rHandle);
- void ParentWindowResized(WindowRef window);
-
- // Carbon GUI event handlers
- static pascal OSStatus sHandleWindowResized(
- EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
- static pascal OSStatus sHandleHiViewResized(
- EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
-
- HIViewRef _hiviewRef;
- WindowRef _windowRef;
- bool _fullScreen;
- int _id;
- webrtc::CriticalSectionWrapper& _renderCritSec;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- webrtc::EventWrapper* _screenUpdateEvent;
- bool _isHIViewRef;
- AGLContext _aglContext;
- int _windowWidth;
- int _windowHeight;
- int _lastWindowWidth;
- int _lastWindowHeight;
- int _lastHiViewWidth;
- int _lastHiViewHeight;
- int _currentParentWindowHeight;
- int _currentParentWindowWidth;
- Rect _currentParentWindowBounds;
- bool _windowHasResized;
- Rect _lastParentWindowBounds;
- Rect _currentHIViewBounds;
- Rect _lastHIViewBounds;
- Rect _windowRect;
- std::map<int, VideoChannelAGL*> _aglChannels;
- std::multimap<int, int> _zOrderToChannel;
- EventHandlerRef _hiviewEventHandlerRef;
- EventHandlerRef _windowEventHandlerRef;
- HIRect _currentViewBounds;
- HIRect _lastViewBounds;
- bool _renderingIsPaused;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
deleted file mode 100644
index f85be5fb5e2..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(CARBON_RENDERING)
-
-#include <AGL/agl.h>
-#include "webrtc/modules/video_render/mac/video_render_agl.h"
-#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
-_id(id),
-_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
-_fullScreen(fullscreen),
-_ptrWindow(window)
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
-
-}
-
-VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
- delete &_renderMacCarbonCritsect;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::Init()
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
-
- if (!_ptrWindow)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
-
- // We don't know if the user passed us a WindowRef or a HIViewRef, so test.
- bool referenceIsValid = false;
-
- // Check if it's a valid WindowRef
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
- WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
- if (IsValidWindowPtr(*windowRef))
- {
- _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
- referenceIsValid = true;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
- }
- else
- {
- HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
- if (HIViewIsValid(*hiviewRef))
- {
- _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
- referenceIsValid = true;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
- }
- }
-
- if(!referenceIsValid)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
- return -1;
- }
-
- if(!_ptrCarbonRender)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
- }
-
- int retVal = _ptrCarbonRender->Init();
- if (retVal == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::ChangeWindow(void* window)
-{
- return -1;
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
-
- if (window == NULL)
- {
- return -1;
- }
- _ptrWindow = window;
-
-
- _ptrWindow = window;
-
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderMacCarbonImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
-
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- VideoChannelAGL* AGLChannel = NULL;
-
- if(!_ptrWindow)
- {
- }
-
- if(!AGLChannel)
- {
- AGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
- }
-
- return AGLChannel;
-
-}
-
-int32_t
-VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const uint32_t streamId)
-{
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- _ptrCarbonRender->DeleteAGLChannel(streamId);
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- return -1;
- return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
-}
-
-int32_t
-VideoRenderMacCarbonImpl::StartRender()
-{
- return _ptrCarbonRender->StartRender();
-}
-
-int32_t
-VideoRenderMacCarbonImpl::StopRender()
-{
- return _ptrCarbonRender->StopRender();
-}
-
-VideoRenderType
-VideoRenderMacCarbonImpl::RenderType()
-{
- return kRenderCarbon;
-}
-
-RawVideoType
-VideoRenderMacCarbonImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool
-VideoRenderMacCarbonImpl::FullScreen()
-{
- return false;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- //NSScreen* mainScreen = [NSScreen mainScreen];
-
- //NSRect frame = [mainScreen frame];
-
- //screenWidth = frame.size.width;
- //screenHeight = frame.size.height;
- return 0;
-}
-
-uint32_t
-VideoRenderMacCarbonImpl::RenderFrameRate(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-
-} // namespace webrtc
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
deleted file mode 100644
index 9ad3a6cdd18..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(CARBON_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class VideoRenderAGL;
-
-// Class definitions
-class VideoRenderMacCarbonImpl : IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderMacCarbonImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderMacCarbonImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t FullScreenRender(void* window, const bool enable)
- {
- // not supported in Carbon at this time
- return -1;
- }
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderMacCarbonCritsect;
- bool _fullScreen;
- void* _ptrWindow;
- VideoRenderAGL* _ptrCarbonRender;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
deleted file mode 100644
index 21add272bbd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(COCOA_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class VideoRenderNSOpenGL;
-
-// Class definitions
-class VideoRenderMacCocoaImpl : IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderMacCocoaImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderMacCocoaImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t FullScreenRender(void* window, const bool enable);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderMacCocoaCritsect;
- bool _fullScreen;
- void* _ptrWindow;
- VideoRenderNSOpenGL* _ptrCocoaRender;
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
deleted file mode 100644
index 5b017fecc0c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
-#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
-_id(id),
-_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
-_fullScreen(fullscreen),
-_ptrWindow(window)
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
-}
-
-VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
- delete &_renderMacCocoaCritsect;
- if (_ptrCocoaRender)
- {
- delete _ptrCocoaRender;
- _ptrCocoaRender = NULL;
- }
-}
-
-int32_t
-VideoRenderMacCocoaImpl::Init()
-{
-
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
-
- // cast ptrWindow from void* to CocoaRenderer. Void* was once NSOpenGLView, and CocoaRenderer is NSOpenGLView.
- _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
- if (!_ptrWindow)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
- int retVal = _ptrCocoaRender->Init();
- if (retVal == -1)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::ChangeWindow(void* window)
-{
-
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
-
- if (window == NULL)
- {
- return -1;
- }
- _ptrWindow = window;
-
-
- _ptrWindow = window;
- _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
-
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderMacCocoaImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- VideoChannelNSOpenGL* nsOpenGLChannel = NULL;
-
- if(!_ptrWindow)
- {
- }
-
- if(!nsOpenGLChannel)
- {
- nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
- }
-
- return nsOpenGLChannel;
-
-}
-
-int32_t
-VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const uint32_t streamId)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- _ptrCocoaRender->DeleteNSGLChannel(streamId);
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
-}
-
-int32_t
-VideoRenderMacCocoaImpl::StartRender()
-{
- return _ptrCocoaRender->StartRender();
-}
-
-int32_t
-VideoRenderMacCocoaImpl::StopRender()
-{
- return _ptrCocoaRender->StopRender();
-}
-
-VideoRenderType
-VideoRenderMacCocoaImpl::RenderType()
-{
- return kRenderCocoa;
-}
-
-RawVideoType
-VideoRenderMacCocoaImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool
-VideoRenderMacCocoaImpl::FullScreen()
-{
- return false;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- NSScreen* mainScreen = [NSScreen mainScreen];
-
- NSRect frame = [mainScreen frame];
-
- screenWidth = frame.size.width;
- screenHeight = frame.size.height;
- return 0;
-}
-
-uint32_t
-VideoRenderMacCocoaImpl::RenderFrameRate(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-} // namespace webrtc
-
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h
deleted file mode 100644
index 457557dad64..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-
-#import <Cocoa/Cocoa.h>
-#import <OpenGL/OpenGL.h>
-#import <OpenGL/glext.h>
-#import <OpenGL/glu.h>
-#include <QuickTime/QuickTime.h>
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
-#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
-
-class Trace;
-
-namespace rtc {
-class PlatformThread;
-} // namespace rtc
-
-namespace webrtc {
-class EventTimerWrapper;
-class VideoRenderNSOpenGL;
-class CriticalSectionWrapper;
-
-class VideoChannelNSOpenGL : public VideoRenderCallback {
-public:
- VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
- virtual ~VideoChannelNSOpenGL();
-
- // A new frame is delivered
- virtual int DeliverFrame(const VideoFrame& videoFrame);
-
- // Called when the incoming frame size and/or number of streams in mix
- // changes.
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-
- virtual int UpdateSize(int width, int height);
-
- // Setup
- int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
- int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
-
- // Called when it's time to render the last frame for the channel
- int RenderOffScreenBuffer();
-
- // Returns true if a new buffer has been delivered to the texture
- int IsUpdated(bool& isUpdated);
- virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
-
- // ********** new module functions ************ //
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- // ********** new module helper functions ***** //
- int ChangeContext(NSOpenGLContext *nsglContext);
- int32_t GetChannelProperties(float& left,
- float& top,
- float& right,
- float& bottom);
-
-private:
-
- NSOpenGLContext* _nsglContext;
- const int _id;
- VideoRenderNSOpenGL* _owner;
- int32_t _width;
- int32_t _height;
- float _startWidth;
- float _startHeight;
- float _stopWidth;
- float _stopHeight;
- int _stretchedWidth;
- int _stretchedHeight;
- int _oldStretchedHeight;
- int _oldStretchedWidth;
- unsigned char* _buffer;
- size_t _bufferSize;
- size_t _incomingBufferSize;
- bool _bufferIsUpdated;
- int _numberOfStreams;
- GLenum _pixelFormat;
- GLenum _pixelDataType;
- unsigned int _texture;
-};
-
-class VideoRenderNSOpenGL
-{
-
-public: // methods
- VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
- ~VideoRenderNSOpenGL();
-
- static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
-
- // Allocates textures
- int Init();
- VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
- VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
- int DeleteNSGLChannel(int channel);
- int DeleteAllNSGLChannels();
- int StopThread();
- bool IsFullScreen();
- bool HasChannels();
- bool HasChannel(int channel);
- int GetChannels(std::list<int>& channelList);
- void LockAGLCntx() EXCLUSIVE_LOCK_FUNCTION(_nsglContextCritSec);
- void UnlockAGLCntx() UNLOCK_FUNCTION(_nsglContextCritSec);
-
- // ********** new module functions ************ //
- int ChangeWindow(CocoaRenderView* newWindowRef);
- int32_t StartRender();
- int32_t StopRender();
- int32_t DeleteNSGLChannel(const uint32_t streamID);
- int32_t GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom);
-
- int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- // ********** new module helper functions ***** //
- int configureNSOpenGLEngine();
- int configureNSOpenGLView();
- int setRenderTargetWindow();
- int setRenderTargetFullScreen();
-
-protected: // methods
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect);
-
-private: // methods
-
- int CreateMixingContext();
- int RenderOffScreenBuffers();
- int DisplayBuffers();
-
-private: // variables
-
-
- CocoaRenderView* _windowRef;
- bool _fullScreen;
- int _id;
- CriticalSectionWrapper& _nsglContextCritSec;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- EventTimerWrapper* _screenUpdateEvent;
- NSOpenGLContext* _nsglContext;
- NSOpenGLContext* _nsglFullScreenContext;
- CocoaFullScreenWindow* _fullScreenWindow;
- Rect _windowRect; // The size of the window
- int _windowWidth;
- int _windowHeight;
- std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
- std::multimap<int, int> _zOrderToChannel;
- bool _renderingIsPaused;
- NSView* _windowRefSuperView;
- NSRect _windowRefSuperViewFrame;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm
deleted file mode 100644
index b7683a96af4..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm
+++ /dev/null
@@ -1,1247 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
-_nsglContext( nsglContext),
-_id( iId),
-_owner( owner),
-_width( 0),
-_height( 0),
-_startWidth( 0.0f),
-_startHeight( 0.0f),
-_stopWidth( 0.0f),
-_stopHeight( 0.0f),
-_stretchedWidth( 0),
-_stretchedHeight( 0),
-_oldStretchedHeight( 0),
-_oldStretchedWidth( 0),
-_buffer( 0),
-_bufferSize( 0),
-_incomingBufferSize( 0),
-_bufferIsUpdated( false),
-_numberOfStreams( 0),
-_pixelFormat( GL_RGBA),
-_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
-_texture( 0)
-{
-
-}
-
-VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
-{
- if (_buffer)
- {
- delete [] _buffer;
- _buffer = NULL;
- }
-
- if (_texture != 0)
- {
- [_nsglContext makeCurrentContext];
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-}
-
-int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
-{
- _owner->LockAGLCntx();
-
- _nsglContext = nsglContext;
- [_nsglContext makeCurrentContext];
-
- _owner->UnlockAGLCntx();
- return 0;
-
-}
-
-int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
- float& right, float& bottom)
-{
-
- _owner->LockAGLCntx();
-
- left = _startWidth;
- top = _startHeight;
- right = _stopWidth;
- bottom = _stopHeight;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoChannelNSOpenGL::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if(_width != videoFrame.width() ||
- _height != videoFrame.height()) {
- if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- _owner->UnlockAGLCntx();
- return -1;
- }
- }
- int ret = DeliverFrame(videoFrame);
-
- _owner->UnlockAGLCntx();
- return ret;
-}
-
-int VideoChannelNSOpenGL::UpdateSize(int width, int height)
-{
- _owner->LockAGLCntx();
- _width = width;
- _height = height;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
-{
-
- _owner->LockAGLCntx();
- _stretchedHeight = stretchHeight;
- _stretchedWidth = stretchWidth;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- // We got a new frame size from VideoAPI, prepare the buffer
-
- _owner->LockAGLCntx();
-
- if (width == _width && _height == height)
- {
- // We already have a correct buffer size
- _numberOfStreams = numberOfStreams;
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- _width = width;
- _height = height;
-
- // Delete the old buffer, create a new one with correct size.
- if (_buffer)
- {
- delete [] _buffer;
- _bufferSize = 0;
- }
-
- _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
- _bufferSize = CalcBufferSize(kARGB, _width, _height);
- _buffer = new unsigned char [_bufferSize];
- memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
-
- [_nsglContext makeCurrentContext];
-
- if(glIsTexture(_texture))
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-
- // Create a new texture
- glGenTextures(1, (GLuint *) &_texture);
-
- GLenum glErr = glGetError();
-
- if (glErr != GL_NO_ERROR)
- {
-
- }
-
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- GLint texSize;
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
-
- if (texSize < _width || texSize < _height)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Set up th texture type and size
- glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
- 0, // level
- GL_RGBA, // internal format
- _width, // width
- _height, // height
- 0, // border 0/1 = off/on
- _pixelFormat, // format, GL_RGBA
- _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
- _buffer); // pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if (_texture == 0) {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
- _incomingBufferSize) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Using the VideoFrame for YV12: YV12 is YVU; I420 assumes
- // YUV.
- // TODO(mikhal) : Use appropriate functionality.
- // TODO(wu): See if we are using glTexSubImage2D correctly.
- int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
- if (rgbRet < 0) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- [_nsglContext makeCurrentContext];
-
- // Make sure this texture is the active one
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "ERROR %d while calling glBindTexture", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "ERROR %d while calling glTexSubImage2d", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::RenderOffScreenBuffer()
-{
-
- _owner->LockAGLCntx();
-
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- // if(_fullscreen)
- // {
- // NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
- // _width = mainDisplayRect.size.width;
- // _height = mainDisplayRect.size.height;
- // glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
- // float newX = mainDisplayRect.size.width/_width;
- // float newY = mainDisplayRect.size.height/_height;
-
- // convert from 0.0 <= size <= 1.0 to
- // open gl world -1.0 < size < 1.0
- GLfloat xStart = 2.0f * _startWidth - 1.0f;
- GLfloat xStop = 2.0f * _stopWidth - 1.0f;
- GLfloat yStart = 1.0f - 2.0f * _stopHeight;
- GLfloat yStop = 1.0f - 2.0f * _startHeight;
-
- [_nsglContext makeCurrentContext];
-
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- _oldStretchedHeight = _stretchedHeight;
- _oldStretchedWidth = _stretchedWidth;
-
- glLoadIdentity();
- glEnable(GL_TEXTURE_RECTANGLE_EXT);
- glBegin(GL_POLYGON);
- {
- glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
- glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
- glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
- glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
- }
- glEnd();
-
- glDisable(GL_TEXTURE_RECTANGLE_EXT);
-
- _bufferIsUpdated = false;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
-{
- _owner->LockAGLCntx();
-
- isUpdated = _bufferIsUpdated;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
- _owner->LockAGLCntx();
-
- _startWidth = startWidth;
- _stopWidth = stopWidth;
- _startHeight = startHeight;
- _stopHeight = stopHeight;
-
- int oldWidth = _width;
- int oldHeight = _height;
- int oldNumberOfStreams = _numberOfStreams;
-
- _width = 0;
- _height = 0;
-
- int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
-
- _owner->UnlockAGLCntx();
- return retVal;
-}
-
-int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
-{
- return -1;
-}
-
-/*
- *
- * VideoRenderNSOpenGL
- *
- */
-
-VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
-_windowRef( (CocoaRenderView*)windowRef),
-_fullScreen( fullScreen),
-_id( iId),
-_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent(EventTimerWrapper::Create()),
-_nsglContext( 0),
-_nsglFullScreenContext( 0),
-_fullScreenWindow( nil),
-_windowRect( ),
-_windowWidth( 0),
-_windowHeight( 0),
-_nsglChannels( ),
-_zOrderToChannel( ),
-_renderingIsPaused (FALSE),
-_windowRefSuperView(NULL),
-_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
-{
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL"));
-}
-
-int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
-{
-
- LockAGLCntx();
-
- _windowRef = newWindowRef;
-
- if(CreateMixingContext() == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- int error = 0;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it!= _nsglChannels.end())
- {
- error |= (it->second)->ChangeContext(_nsglContext);
- it++;
- }
- if(error != 0)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-/* Check if the thread and event already exist.
- * If so then they will simply be restarted
- * If not then create them and continue
- */
-int32_t VideoRenderNSOpenGL::StartRender()
-{
-
- LockAGLCntx();
-
- const unsigned int MONITOR_FREQ = 60;
- if(TRUE == _renderingIsPaused)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
-
- // we already have the thread. Most likely StopRender() was called and they were paused
- _screenUpdateThread->Start();
- if (FALSE ==
- _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- UnlockAGLCntx();
- return 0;
- }
-
-
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
- UnlockAGLCntx();
- return -1;
- }
-
-
- UnlockAGLCntx();
- return 0;
-}
-int32_t VideoRenderNSOpenGL::StopRender()
-{
-
- LockAGLCntx();
-
- /* The code below is functional
- * but it pauses for several seconds
- */
-
- // pause the update thread and the event timer
- if(!_screenUpdateThread || !_screenUpdateEvent)
- {
- _renderingIsPaused = TRUE;
-
- UnlockAGLCntx();
- return 0;
- }
-
- _screenUpdateThread->Stop();
- if (FALSE == _screenUpdateEvent->StopTimer()) {
- _renderingIsPaused = FALSE;
-
- UnlockAGLCntx();
- return -1;
- }
-
- _renderingIsPaused = TRUE;
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::configureNSOpenGLView()
-{
- return 0;
-
-}
-
-int VideoRenderNSOpenGL::configureNSOpenGLEngine()
-{
-
- LockAGLCntx();
-
- // Disable not needed functionality to increase performance
- glDisable(GL_DITHER);
- glDisable(GL_ALPHA_TEST);
- glDisable(GL_STENCIL_TEST);
- glDisable(GL_FOG);
- glDisable(GL_TEXTURE_2D);
- glPixelZoom(1.0, 1.0);
- glDisable(GL_BLEND);
- glDisable(GL_DEPTH_TEST);
- glDepthMask(GL_FALSE);
- glDisable(GL_CULL_FACE);
-
- // Set texture parameters
- glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- }
- glViewport(0, 0, _windowWidth, _windowHeight);
-
- // Synchronize buffer swaps with vertical refresh rate
- GLint swapInt = 1;
- [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::setRenderTargetWindow()
-{
- LockAGLCntx();
-
-
- GLuint attribs[] =
- {
- NSOpenGLPFAColorSize, 24,
- NSOpenGLPFAAlphaSize, 8,
- NSOpenGLPFADepthSize, 16,
- NSOpenGLPFAAccelerated,
- 0
- };
-
- NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
- (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
-
- if(_windowRef)
- {
- [_windowRef initCocoaRenderView:fmt];
- }
- else
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _nsglContext = [_windowRef nsOpenGLContext];
- [_nsglContext makeCurrentContext];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::setRenderTargetFullScreen()
-{
- LockAGLCntx();
-
-
- GLuint attribs[] =
- {
- NSOpenGLPFAColorSize, 24,
- NSOpenGLPFAAlphaSize, 8,
- NSOpenGLPFADepthSize, 16,
- NSOpenGLPFAAccelerated,
- 0
- };
-
- NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
- (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
-
- // Store original superview and frame for use when exiting full screens
- _windowRefSuperViewFrame = [_windowRef frame];
- _windowRefSuperView = [_windowRef superview];
-
-
- // create new fullscreen window
- NSRect screenRect = [[NSScreen mainScreen]frame];
- [_windowRef setFrame:screenRect];
- [_windowRef setBounds:screenRect];
-
-
- _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
- [_fullScreenWindow grabFullScreen];
- [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
-
- if(_windowRef)
- {
- [_windowRef initCocoaRenderViewFullScreen:fmt];
- }
- else
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _nsglContext = [_windowRef nsOpenGLContext];
- [_nsglContext makeCurrentContext];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
-{
-
- if(_fullScreen)
- {
- if(_fullScreenWindow)
- {
- // Detach CocoaRenderView from full screen view back to
- // it's original parent.
- [_windowRef removeFromSuperview];
- if(_windowRefSuperView)
- {
- [_windowRefSuperView addSubview:_windowRef];
- [_windowRef setFrame:_windowRefSuperViewFrame];
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
- [_fullScreenWindow releaseFullScreen];
-
- }
- }
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
- }
-
- if (_nsglContext != 0)
- {
- [_nsglContext makeCurrentContext];
- _nsglContext = nil;
- }
-
- // Delete all channels
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it!= _nsglChannels.end())
- {
- delete it->second;
- _nsglChannels.erase(it);
- it = _nsglChannels.begin();
- }
- _nsglChannels.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while(zIt != _zOrderToChannel.end())
- {
- _zOrderToChannel.erase(zIt);
- zIt = _zOrderToChannel.begin();
- }
- _zOrderToChannel.clear();
-
-}
-
-/* static */
-int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
-{
- return -1;
-}
-
-int VideoRenderNSOpenGL::Init()
-{
-
- LockAGLCntx();
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
-
- if (CreateMixingContext() == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (HasChannel(channel))
- {
- return NULL;
- }
-
- if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
- {
-
- }
-
- VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
- if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- if (newAGLChannel)
- {
- delete newAGLChannel;
- newAGLChannel = NULL;
- }
-
- return NULL;
- }
-
- _nsglChannels[channel] = newAGLChannel;
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
-
- return newAGLChannel;
-}
-
-int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it;
- it = _nsglChannels.begin();
-
- while (it != _nsglChannels.end())
- {
- VideoChannelNSOpenGL* channel = it->second;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, channel);
- delete channel;
- it++;
- }
- _nsglChannels.clear();
- return 0;
-}
-
-int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it;
- it = _nsglChannels.find(channel);
- if (it != _nsglChannels.end())
- {
- delete it->second;
- _nsglChannels.erase(it);
- }
- else
- {
- return -1;
- }
-
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while( zIt != _zOrderToChannel.end())
- {
- if (zIt->second == (int)channel)
- {
- _zOrderToChannel.erase(zIt);
- break;
- }
- zIt++;
- }
-
- return 0;
-}
-
-int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- bool channelFound = false;
-
- // Loop through all channels until we find a match.
- // From that, get zorder.
- // From that, get T, L, R, B
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- if(streamId == rIt->second)
- {
- channelFound = true;
-
- zOrder = rIt->second;
-
- std::map<int, VideoChannelNSOpenGL*>::iterator rIt = _nsglChannels.find(streamId);
- VideoChannelNSOpenGL* tempChannel = rIt->second;
-
- if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
- {
- return -1;
- }
- break;
- }
- }
-
- if(false == channelFound)
- {
-
- return -1;
- }
-
- return 0;
-}
-
-int VideoRenderNSOpenGL::StopThread()
-{
-
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s Stopping thread ", __FUNCTION__, tmpPtr);
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- tmpPtr->Stop();
- delete tmpPtr;
- }
-
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
-
- return 0;
-}
-
-bool VideoRenderNSOpenGL::IsFullScreen()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
- return _fullScreen;
-}
-
-bool VideoRenderNSOpenGL::HasChannels()
-{
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (_nsglChannels.begin() != _nsglChannels.end())
- {
- return true;
- }
- return false;
-}
-
-bool VideoRenderNSOpenGL::HasChannel(int channel)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
-
- if (it != _nsglChannels.end())
- {
- return true;
- }
- return false;
-}
-
-int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
-
- while (it != _nsglChannels.end())
- {
- channelList.push_back(it->first);
- it++;
- }
-
- return 0;
-}
-
-VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
-
- if (it != _nsglChannels.end())
- {
- VideoChannelNSOpenGL* aglChannel = it->second;
- if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
- __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
- return NULL;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
- __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
-
- std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
- while(it != _zOrderToChannel.end())
- {
- if (it->second == channel)
- {
- if (it->first != zOrder)
- {
- _zOrderToChannel.erase(it);
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
- }
- break;
- }
- it++;
- }
- return aglChannel;
- }
-
- return NULL;
-}
-
-/*
- *
- * Rendering process
- *
- */
-
-bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderNSOpenGL::ScreenUpdateProcess()
-{
-
- _screenUpdateEvent->Wait(10);
- LockAGLCntx();
-
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
- UnlockAGLCntx();
- return false;
- }
-
- [_nsglContext makeCurrentContext];
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- glViewport(0, 0, _windowWidth, _windowHeight);
- }
-
- // Check if there are any updated buffers
- bool updated = false;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it != _nsglChannels.end())
- {
-
- VideoChannelNSOpenGL* aglChannel = it->second;
- aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
- aglChannel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
-
- if (updated)
- {
-
- // At least on buffers is updated, we need to repaint the texture
- if (RenderOffScreenBuffers() != -1)
- {
- UnlockAGLCntx();
- return true;
- }
- }
- // }
- UnlockAGLCntx();
- return true;
-}
-
-/*
- *
- * Functions for creating mixing buffers and screen settings
- *
- */
-
-int VideoRenderNSOpenGL::CreateMixingContext()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if(_fullScreen)
- {
- if(-1 == setRenderTargetFullScreen())
- {
- return -1;
- }
- }
- else
- {
-
- if(-1 == setRenderTargetWindow())
- {
- return -1;
- }
- }
-
- configureNSOpenGLEngine();
-
- DisplayBuffers();
-
- GLenum glErr = glGetError();
- if (glErr)
- {
- }
-
- return 0;
-}
-
-/*
- *
- * Rendering functions
- *
- */
-
-int VideoRenderNSOpenGL::RenderOffScreenBuffers()
-{
- LockAGLCntx();
-
- // Get the current window size, it might have changed since last render.
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- [_nsglContext makeCurrentContext];
- glClear(GL_COLOR_BUFFER_BIT);
-
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- int channelId = rIt->second;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);
-
- VideoChannelNSOpenGL* aglChannel = it->second;
-
- aglChannel->RenderOffScreenBuffer();
- }
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-/*
- *
- * Help functions
- *
- * All help functions assumes external protections
- *
- */
-
-int VideoRenderNSOpenGL::DisplayBuffers()
-{
-
- LockAGLCntx();
-
- glFinish();
- [_nsglContext flushBuffer];
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (_windowRef)
- {
- if(_fullScreen)
- {
- NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
- rect.bottom = 0;
- rect.left = 0;
- rect.right = mainDisplayRect.size.width;
- rect.top = mainDisplayRect.size.height;
- }
- else
- {
- rect.top = [_windowRef frame].origin.y;
- rect.left = [_windowRef frame].origin.x;
- rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height;
- rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width;
- }
-
- return 0;
- }
- else
- {
- return -1;
- }
-}
-
-int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/,
- const uint8_t* /*text*/,
- const int32_t /*textLength*/,
- const uint32_t /*textColorRef*/,
- const uint32_t /*backgroundColorRef*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/)
-{
-
- return 0;
-
-}
-
-void VideoRenderNSOpenGL::LockAGLCntx()
-{
- _nsglContextCritSec.Enter();
-}
-void VideoRenderNSOpenGL::UnlockAGLCntx()
-{
- _nsglContextCritSec.Leave();
-}
-
-/*
-
- bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
- {
- NSRect mainDisplayRect, viewRect;
-
- // Create a screen-sized window on the display you want to take over
- // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display
- mainDisplayRect = [[NSScreen mainScreen] frame];
- fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask
- backing:NSBackingStoreBuffered defer:YES];
-
- // Set the window level to be above the menu bar
- [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];
-
- // Perform any other window configuration you desire
- [fullScreenWindow setOpaque:YES];
- [fullScreenWindow setHidesOnDeactivate:YES];
-
- // Create a view with a double-buffered OpenGL context and attach it to the window
- // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined
- viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height);
- fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]];
- [fullScreenWindow setContentView:fullScreenView];
-
- // Show the window
- [fullScreenWindow makeKeyAndOrderFront:self];
-
- // Set the scene with the full-screen viewport and viewing transformation
- [scene setViewportRect:viewRect];
-
- // Assign the view's MainController to self
- [fullScreenView setMainController:self];
-
- if (!isAnimating) {
- // Mark the view as needing drawing to initalize its contents
- [fullScreenView setNeedsDisplay:YES];
- }
- else {
- // Start playing the animation
- [fullScreenView startAnimation];
- }
-
- }
-
-
-
- */
-
-
-} // namespace webrtc
-
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp b/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
deleted file mode 100644
index c443a58f6cb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
+++ /dev/null
Binary files differ
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc
deleted file mode 100644
index cea2f6b56fe..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc
+++ /dev/null
@@ -1,645 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/test/testAPI/testAPI.h"
-
-#include <stdio.h>
-
-#if defined(_WIN32)
-#include <tchar.h>
-#include <windows.h>
-#include <assert.h>
-#include <fstream>
-#include <iostream>
-#include <string>
-#include <windows.h>
-#include <ddraw.h>
-
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <iostream>
-#include <sys/time.h>
-
-#endif
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_render/video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor);
-int TestSingleStream(VideoRender* renderModule);
-int TestFullscreenStream(VideoRender* &renderModule,
- void* window,
- const VideoRenderType videoRenderType);
-int TestBitmapText(VideoRender* renderModule);
-int TestMultipleStreams(VideoRender* renderModule);
-int TestExternalRender(VideoRender* renderModule);
-
-#define TEST_FRAME_RATE 30
-#define TEST_TIME_SECOND 5
-#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
-#define TEST_STREAM0_START_COLOR 0
-#define TEST_STREAM1_START_COLOR 64
-#define TEST_STREAM2_START_COLOR 128
-#define TEST_STREAM3_START_COLOR 192
-
-#if defined(WEBRTC_LINUX)
-
-#define GET_TIME_IN_MS timeGetTime()
-
-unsigned long timeGetTime()
-{
- struct timeval tv;
- struct timezone tz;
- unsigned long val;
-
- gettimeofday(&tv, &tz);
- val= tv.tv_sec*1000+ tv.tv_usec/1000;
- return(val);
-}
-
-#elif defined(WEBRTC_MAC)
-
-#include <unistd.h>
-
-#define GET_TIME_IN_MS timeGetTime()
-
-unsigned long timeGetTime()
-{
- return 0;
-}
-
-#else
-
-#define GET_TIME_IN_MS ::timeGetTime()
-
-#endif
-
-using namespace std;
-
-#if defined(_WIN32)
-LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
-{
- switch(uMsg)
- {
- case WM_DESTROY:
- break;
- case WM_COMMAND:
- break;
- }
- return DefWindowProc(hWnd,uMsg,wParam,lParam);
-}
-
-int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
-{
- HINSTANCE hinst = GetModuleHandle(0);
- WNDCLASSEX wcx;
- wcx.hInstance = hinst;
- wcx.lpszClassName = TEXT("VideoRenderTest");
- wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
- wcx.style = CS_DBLCLKS;
- wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
- wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
- wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
- wcx.lpszMenuName = NULL;
- wcx.cbSize = sizeof (WNDCLASSEX);
- wcx.cbClsExtra = 0;
- wcx.cbWndExtra = 0;
- wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
-
- // Register our window class with the operating system.
- // If there is an error, exit program.
- if ( !RegisterClassEx (&wcx) )
- {
- MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
- return 0;
- }
-
- // Create the main window.
- hwndMain = CreateWindowEx(
- 0, // no extended styles
- TEXT("VideoRenderTest"), // class name
- TEXT("VideoRenderTest Window"), // window name
- WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
- 800, // horizontal position
- 0, // vertical position
- width, // width
- height, // height
- (HWND) NULL, // no parent or owner window
- (HMENU) NULL, // class menu used
- hinst, // instance handle
- NULL); // no window creation data
-
- if (!hwndMain)
- return -1;
-
- // Show the window using the flag specified by the program
- // that started the application, and send the application
- // a WM_PAINT message.
-
- ShowWindow(hwndMain, SW_SHOWDEFAULT);
- UpdateWindow(hwndMain);
- return 0;
-}
-
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-
-int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength)
-
-{
- int screen, xpos = 10, ypos = 10;
- XEvent evnt;
- XSetWindowAttributes xswa; // window attribute struct
- XVisualInfo vinfo; // screen visual info struct
- unsigned long mask; // attribute mask
-
- // get connection handle to xserver
- Display* _display = XOpenDisplay( NULL );
-
- // get screen number
- screen = DefaultScreen(_display);
-
- // put desired visual info for the screen in vinfo
- if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
- {
- //printf( "Screen visual info match!\n" );
- }
-
- // set window attributes
- xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
- xswa.event_mask = StructureNotifyMask | ExposureMask;
- xswa.background_pixel = 0;
- xswa.border_pixel = 0;
-
- // value mask for attributes
- mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
-
- switch( winNum )
- {
- case 0:
- xpos = 200;
- ypos = 200;
- break;
- case 1:
- xpos = 300;
- ypos = 200;
- break;
- default:
- break;
- }
-
- // create a subwindow for parent (defroot)
- Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
- xpos, ypos,
- width,
- height,
- 0, vinfo.depth,
- InputOutput,
- vinfo.visual,
- mask, &xswa);
-
- // Set window name
- if( winNum == 0 )
- {
- XStoreName(_display, _window, "VE MM Local Window");
- XSetIconName(_display, _window, "VE MM Local Window");
- }
- else if( winNum == 1 )
- {
- XStoreName(_display, _window, "VE MM Remote Window");
- XSetIconName(_display, _window, "VE MM Remote Window");
- }
-
- // make x report events for mask
- XSelectInput(_display, _window, StructureNotifyMask);
-
- // map the window to the display
- XMapWindow(_display, _window);
-
- // wait for map event
- do
- {
- XNextEvent(_display, &evnt);
- }
- while (evnt.type != MapNotify || evnt.xmap.event != _window);
-
- *outWindow = _window;
- *outDisplay = _display;
-
- return 0;
-}
-#endif // WEBRTC_LINUX
-
-// Note: Mac code is in testApi_mac.mm.
-
-class MyRenderCallback: public VideoRenderCallback
-{
-public:
- MyRenderCallback() :
- _cnt(0)
- {
- }
- ;
- ~MyRenderCallback()
- {
- }
- ;
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- _cnt++;
- if (_cnt % 100 == 0)
- {
- printf("Render callback %d \n",_cnt);
- }
- return 0;
- }
- int32_t _cnt;
-};
-
-void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor) {
- // changing color
- static uint8_t color = startColor;
-
- memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane));
- memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
- memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane));
-
- ++color;
-}
-
-int TestSingleStream(VideoRender* renderModule) {
- int error = 0;
- // Add settings for a stream to render
- printf("Add stream 0 to entire window\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
-
- printf("Start render\n");
- error = renderModule->StartRender(streamId0);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
- // Render this frame with the specified delay
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
- + renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(1000/TEST_FRAME_RATE);
- }
-
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
-
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
-
- return 0;
-}
-
-int TestFullscreenStream(VideoRender* &renderModule,
- void* window,
- const VideoRenderType videoRenderType) {
- VideoRender::DestroyVideoRender(renderModule);
- renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
-
- TestSingleStream(renderModule);
-
- VideoRender::DestroyVideoRender(renderModule);
- renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
-
- return 0;
-}
-
-int TestBitmapText(VideoRender* renderModule) {
-#if defined(WIN32)
-
- int error = 0;
- // Add settings for a stream to render
- printf("Add stream 0 to entire window\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
-
- printf("Adding Bitmap\n");
- DDCOLORKEY ColorKey; // black
- ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
- ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
- HBITMAP hbm = (HBITMAP)LoadImage(NULL,
- (LPCTSTR)_T("renderStartImage.bmp"),
- IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
- renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
- 0.3f);
-
- printf("Adding Text\n");
- renderModule->SetText(1, (uint8_t*) "WebRtc Render Demo App", 20,
- RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
- 1.0f);
-
- printf("Start render\n");
- error = renderModule->StartRender(streamId0);
- assert(error == 0);
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
- // Render this frame with the specified delay
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(1000/TEST_FRAME_RATE);
- }
- // Sleep and let all frames be rendered before closing
- SleepMs(renderDelayMs*2);
-
-
- // Shut down
- printf("Closing...\n");
- ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
- ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
- renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
- renderModule->SetText(1, NULL, 20, RGB(255,255,255),
- RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
-
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
-
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
-#endif
-
- return 0;
-}
-
-int TestMultipleStreams(VideoRender* renderModule) {
- int error = 0;
-
- // Add settings for a stream to render
- printf("Add stream 0\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 =
- renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
- assert(renderCallback0 != NULL);
- printf("Add stream 1\n");
- const int streamId1 = 1;
- VideoRenderCallback* renderCallback1 =
- renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
- assert(renderCallback1 != NULL);
- printf("Add stream 2\n");
- const int streamId2 = 2;
- VideoRenderCallback* renderCallback2 =
- renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
- assert(renderCallback2 != NULL);
- printf("Add stream 3\n");
- const int streamId3 = 3;
- VideoRenderCallback* renderCallback3 =
- renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
- assert(renderCallback3 != NULL);
- error = renderModule->StartRender(streamId0);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
- error = renderModule->StartRender(streamId1);
- assert(error == 0);
- error = renderModule->StartRender(streamId2);
- assert(error == 0);
- error = renderModule->StartRender(streamId3);
- assert(error == 0);
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame1;
- videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame2;
- videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame3;
- videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- // Render frames with the specified delay.
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
-
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
-
- GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
- videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback1->RenderFrame(streamId1, videoFrame1);
-
- GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
- videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback2->RenderFrame(streamId2, videoFrame2);
-
- GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
- videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback3->RenderFrame(streamId3, videoFrame3);
-
- SleepMs(1000/TEST_FRAME_RATE);
- }
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
- error = renderModule->StopRender(streamId1);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId1);
- assert(error == 0);
- error = renderModule->StopRender(streamId2);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId2);
- assert(error == 0);
- error = renderModule->StopRender(streamId3);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId3);
- assert(error == 0);
-
- return 0;
-}
-
-int TestExternalRender(VideoRender* renderModule) {
- int error = 0;
- MyRenderCallback *externalRender = new MyRenderCallback();
-
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 =
- renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
- 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
- error = renderModule->AddExternalRenderCallback(streamId0, externalRender);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
-
- error = renderModule->StartRender(streamId0);
- assert(error == 0);
-
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
- int frameCount = TEST_FRAME_NUM;
- for (int i=0; i<frameCount; i++) {
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(33);
- }
-
- // Sleep and let all frames be rendered before closing
- SleepMs(2*renderDelayMs);
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
- assert(frameCount == externalRender->_cnt);
-
- delete externalRender;
- externalRender = NULL;
-
- return 0;
-}
-
-void RunVideoRenderTests(void* window, VideoRenderType windowType) {
- int myId = 12345;
-
- // Create the render module
- printf("Create render module\n");
- VideoRender* renderModule = NULL;
- renderModule = VideoRender::CreateVideoRender(myId,
- window,
- false,
- windowType);
- assert(renderModule != NULL);
-
- // ##### Test single stream rendering ####
- printf("#### TestSingleStream ####\n");
- if (TestSingleStream(renderModule) != 0) {
- printf ("TestSingleStream failed\n");
- }
-
- // ##### Test fullscreen rendering ####
- printf("#### TestFullscreenStream ####\n");
- if (TestFullscreenStream(renderModule, window, windowType) != 0) {
- printf ("TestFullscreenStream failed\n");
- }
-
- // ##### Test bitmap and text ####
- printf("#### TestBitmapText ####\n");
- if (TestBitmapText(renderModule) != 0) {
- printf ("TestBitmapText failed\n");
- }
-
- // ##### Test multiple streams ####
- printf("#### TestMultipleStreams ####\n");
- if (TestMultipleStreams(renderModule) != 0) {
- printf ("TestMultipleStreams failed\n");
- }
-
- // ##### Test multiple streams ####
- printf("#### TestExternalRender ####\n");
- if (TestExternalRender(renderModule) != 0) {
- printf ("TestExternalRender failed\n");
- }
-
- delete renderModule;
- renderModule = NULL;
-
- printf("VideoRender unit tests passed.\n");
-}
-
-// Note: The Mac main is implemented in testApi_mac.mm.
-#if defined(_WIN32)
-int _tmain(int argc, _TCHAR* argv[])
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-int main(int argc, char* argv[])
-#endif
-#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
-{
- // Create a window for testing.
- void* window = NULL;
-#if defined (_WIN32)
- HWND testHwnd;
- WebRtcCreateWindow(testHwnd, 0, 352, 288);
- window = (void*)testHwnd;
- VideoRenderType windowType = kRenderWindows;
-#elif defined(WEBRTC_LINUX)
- Window testWindow;
- Display* display;
- WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
- VideoRenderType windowType = kRenderX11;
- window = (void*)testWindow;
-#endif // WEBRTC_LINUX
-
- RunVideoRenderTests(window, windowType);
- return 0;
-}
-#endif // !WEBRTC_MAC
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h
deleted file mode 100644
index 0655a5b4343..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
deleted file mode 100644
index dfee4c72983..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testAPI.h"
-
-#include <iostream>
-
-#import <Foundation/Foundation.h>
-#import <Cocoa/Cocoa.h>
-#import <AppKit/AppKit.h>
-#import <QTKit/QTKit.h>
-#include <sys/time.h>
-
-#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_render/video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
-{
- // In Cocoa, rendering is not done directly to a window like in Windows and Linux.
- // It is rendererd to a Subclass of NSOpenGLView
-
- // create cocoa container window
- NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
- NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame
- styleMask:NSTitledWindowMask
- backing:NSBackingStoreBuffered
- defer:NO];
- [outWindow orderOut:nil];
- [outWindow setTitle:@"Cocoa Renderer"];
- [outWindow setBackgroundColor:[NSColor blueColor]];
-
- // create renderer and attach to window
- NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
- cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
- [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
-
- [outWindow makeKeyAndOrderFront:NSApp];
-
- return 0;
-}
-
-int main (int argc, const char * argv[]) {
- NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
- [NSApplication sharedApplication];
-
- CocoaRenderView* testWindow;
- WebRtcCreateWindow(testWindow, 0, 352, 288);
- VideoRenderType windowType = kRenderCocoa;
- void* window = (void*)testWindow;
-
- RunVideoRenderTests(window, windowType);
-
- [pool release];
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.gypi b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
deleted file mode 100644
index e8cc03a4b02..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render.gypi
+++ /dev/null
@@ -1,218 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'targets': [
- {
- # Note this library is missing an implementation for the video render.
- # For that targets must link with 'video_render' or
- # 'video_render_module_internal_impl' if they want to compile and use
- # the internal render as the default renderer.
- 'target_name': 'video_render_module',
- 'type': 'static_library',
- 'dependencies': [
- 'webrtc_utility',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/common_video/common_video.gyp:common_video',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
- 'sources': [
- 'external/video_render_external_impl.cc',
- 'external/video_render_external_impl.h',
- 'i_video_render.h',
- 'video_render.h',
- 'video_render_defines.h',
- 'video_render_impl.h',
- ],
- },
- {
- # Default video_render_module implementation that only supports external
- # renders.
- 'target_name': 'video_render',
- 'type': 'static_library',
- 'dependencies': [
- 'video_render_module',
- ],
- 'sources': [
- 'video_render_impl.cc',
- ],
- },
- ], # targets
-
- 'conditions': [
- ['build_with_chromium==0', {
- 'targets': [
- {
- # video_render_module implementation that supports the internal
- # video_render implementation.
- 'target_name': 'video_render_module_internal_impl',
- 'type': 'static_library',
- 'dependencies': [
- '<(webrtc_root)/common.gyp:webrtc_common',
- 'video_render_module',
- ],
- 'sources': [
- 'video_render_internal_impl.cc',
- ],
- # TODO(andrew): with the proper suffix, these files will be excluded
- # automatically.
- 'conditions': [
- ['OS=="android"', {
- 'sources': [
- 'android/video_render_android_impl.h',
- 'android/video_render_android_native_opengl2.h',
- 'android/video_render_android_surface_view.h',
- 'android/video_render_opengles20.h',
- 'android/video_render_android_impl.cc',
- 'android/video_render_android_native_opengl2.cc',
- 'android/video_render_android_surface_view.cc',
- 'android/video_render_opengles20.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lGLESv2',
- ],
- },
- }],
- ['OS=="ios"', {
- 'sources': [
- # iOS
- 'ios/open_gles20.h',
- 'ios/open_gles20.mm',
- 'ios/video_render_ios_channel.h',
- 'ios/video_render_ios_channel.mm',
- 'ios/video_render_ios_gles20.h',
- 'ios/video_render_ios_gles20.mm',
- 'ios/video_render_ios_impl.h',
- 'ios/video_render_ios_impl.mm',
- 'ios/video_render_ios_view.h',
- 'ios/video_render_ios_view.mm',
- ],
- 'xcode_settings': {
- 'CLANG_ENABLE_OBJC_ARC': 'YES',
- },
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework OpenGLES',
- '-framework QuartzCore',
- '-framework UIKit',
- ],
- },
- },
- }],
- ['OS=="linux"', {
- 'sources': [
- 'linux/video_render_linux_impl.h',
- 'linux/video_x11_channel.h',
- 'linux/video_x11_render.h',
- 'linux/video_render_linux_impl.cc',
- 'linux/video_x11_channel.cc',
- 'linux/video_x11_render.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lXext',
- ],
- },
- }],
- ['OS=="mac"', {
- 'sources': [
- 'mac/cocoa_full_screen_window.h',
- 'mac/cocoa_render_view.h',
- 'mac/video_render_agl.h',
- 'mac/video_render_mac_carbon_impl.h',
- 'mac/video_render_mac_cocoa_impl.h',
- 'mac/video_render_nsopengl.h',
- 'mac/video_render_nsopengl.mm',
- 'mac/video_render_mac_cocoa_impl.mm',
- 'mac/video_render_agl.cc',
- 'mac/video_render_mac_carbon_impl.cc',
- 'mac/cocoa_render_view.mm',
- 'mac/cocoa_full_screen_window.mm',
- ],
- }],
- ['OS=="win"', {
- 'sources': [
- 'windows/i_video_render_win.h',
- 'windows/video_render_direct3d9.h',
- 'windows/video_render_windows_impl.h',
- 'windows/video_render_direct3d9.cc',
- 'windows/video_render_windows_impl.cc',
- ],
- 'include_dirs': [
- '<(directx_sdk_path)/Include',
- ],
- }],
- ['OS=="win" and clang==1', {
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'AdditionalOptions': [
- # Disable warnings failing when compiling with Clang on Windows.
- # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
- '-Wno-comment',
- '-Wno-reorder',
- '-Wno-unused-value',
- '-Wno-unused-private-field',
- ],
- },
- },
- }],
- ] # conditions
- },
- ],
- }], # build_with_chromium==0
- ['include_tests==1 and OS!="ios"', {
- 'targets': [
- {
- # Does not compile on iOS: webrtc:4755.
- 'target_name': 'video_render_tests',
- 'type': 'executable',
- 'dependencies': [
- 'video_render_module_internal_impl',
- 'webrtc_utility',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/common_video/common_video.gyp:common_video',
- ],
- 'sources': [
- 'test/testAPI/testAPI.cc',
- 'test/testAPI/testAPI.h',
- 'test/testAPI/testAPI_android.cc',
- 'test/testAPI/testAPI_mac.mm',
- ],
- 'conditions': [
- ['OS=="mac" or OS=="linux"', {
- 'cflags': [
- '-Wno-write-strings',
- ],
- 'ldflags': [
- '-lpthread -lm',
- ],
- }],
- ['OS=="linux"', {
- 'link_settings': {
- 'libraries': [
- '-lX11',
- ],
- },
- }],
- ['OS=="mac"', {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
- ],
- },
- }],
- ] # conditions
- }, # video_render_module_test
- ], # targets
- }], # include_tests==1 and OS!=ios
- ], # conditions
-}
-
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.h b/chromium/third_party/webrtc/modules/video_render/video_render.h
deleted file mode 100644
index 84c9536e35c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render.h
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
-
-/*
- * video_render.h
- *
- * This header file together with module.h and module_common_types.h
- * contains all of the APIs that are needed for using the video render
- * module class.
- *
- */
-
-#include "webrtc/modules/include/module.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRender: public Module
-{
-public:
- /*
- * Create a video render module object
- *
- * id - unique identifier of this video render module object
- * window - pointer to the window to render to
- * fullscreen - true if this is a fullscreen renderer
- * videoRenderType - type of renderer to create
- */
- static VideoRender
- * CreateVideoRender(
- const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType =
- kRenderDefault);
-
- /*
- * Destroy a video render module object
- *
- * module - object to destroy
- */
- static void DestroyVideoRender(VideoRender* module);
-
- int64_t TimeUntilNextProcess() override = 0;
- void Process() override = 0;
-
- /**************************************************************************
- *
- * Window functions
- *
- ***************************************************************************/
-
- /*
- * Get window for this renderer
- */
- virtual void* Window() = 0;
-
- /*
- * Change render window
- *
- * window - the new render window, assuming same type as originally created.
- */
- virtual int32_t ChangeWindow(void* window) = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- /*
- * Add incoming render stream
- *
- * streamID - id of the stream to add
- * zOrder - relative render order for the streams, 0 = on top
- * left - position of the stream in the window, [0.0f, 1.0f]
- * top - position of the stream in the window, [0.0f, 1.0f]
- * right - position of the stream in the window, [0.0f, 1.0f]
- * bottom - position of the stream in the window, [0.0f, 1.0f]
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom) = 0;
- /*
- * Delete incoming render stream
- *
- * streamID - id of the stream to add
- */
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId) = 0;
-
- /*
- * Add incoming render callback, used for external rendering
- *
- * streamID - id of the stream the callback is used for
- * renderObject - the VideoRenderCallback to use for this stream, NULL to remove
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual int32_t
- AddExternalRenderCallback(const uint32_t streamId,
- VideoRenderCallback* renderObject) = 0;
-
- /*
- * Get the porperties for an incoming render stream
- *
- * streamID - [in] id of the stream to get properties for
- * zOrder - [out] relative render order for the streams, 0 = on top
- * left - [out] position of the stream in the window, [0.0f, 1.0f]
- * top - [out] position of the stream in the window, [0.0f, 1.0f]
- * right - [out] position of the stream in the window, [0.0f, 1.0f]
- * bottom - [out] position of the stream in the window, [0.0f, 1.0f]
- */
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const = 0;
- /*
- * The incoming frame rate to the module, not the rate rendered in the window.
- */
- virtual uint32_t
- GetIncomingFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Returns the number of incoming streams added to this render module
- */
- virtual uint32_t GetNumIncomingRenderStreams() const = 0;
-
- /*
- * Returns true if this render module has the streamId added, false otherwise.
- */
- virtual bool
- HasIncomingRenderStream(const uint32_t streamId) const = 0;
-
- /*
- * Registers a callback to get raw images in the same time as sent
- * to the renderer. To be used for external rendering.
- */
- virtual int32_t
- RegisterRawFrameCallback(const uint32_t streamId,
- VideoRenderCallback* callbackObj) = 0;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- /*
- * Starts rendering the specified stream
- */
- virtual int32_t StartRender(const uint32_t streamId) = 0;
-
- /*
- * Stops the renderer
- */
- virtual int32_t StopRender(const uint32_t streamId) = 0;
-
- /*
- * Resets the renderer
- * No streams are removed. The state should be as after AddStream was called.
- */
- virtual int32_t ResetRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- /*
- * Returns the preferred render video type
- */
- virtual RawVideoType PreferredVideoType() const = 0;
-
- /*
- * Returns true if the renderer is in fullscreen mode, otherwise false.
- */
- virtual bool IsFullScreen() = 0;
-
- /*
- * Gets screen resolution in pixels
- */
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const = 0;
-
- /*
- * Get the actual render rate for this stream. I.e rendered frame rate,
- * not frames delivered to the renderer.
- */
- virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Set cropping of incoming stream
- */
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- /*
- * re-configure renderer
- */
-
- // Set the expected time needed by the graphics card or external renderer,
- // i.e. frames will be released for rendering |delay_ms| before set render
- // time in the video frame.
- virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
- int32_t delay_ms) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- /*
- * Set a start image. The image is rendered before the first image has been delivered
- */
- virtual int32_t SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) = 0;
-
- /*
- * Set a timout image. The image is rendered if no videoframe has been delivered
- */
- virtual int32_t SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_defines.h b/chromium/third_party/webrtc/modules/video_render/video_render_defines.h
deleted file mode 100644
index 999707cb6e5..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_defines.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/modules/include/module_common_types.h"
-
-namespace webrtc
-{
-// Defines
-#ifndef NULL
-#define NULL 0
-#endif
-
-// Enums
-enum VideoRenderType
-{
- kRenderExternal = 0, // External
- kRenderWindows = 1, // Windows
- kRenderCocoa = 2, // Mac
- kRenderCarbon = 3,
- kRenderiOS = 4, // iPhone
- kRenderAndroid = 5, // Android
- kRenderX11 = 6, // Linux
- kRenderDefault
-};
-
-// Runtime errors
-enum VideoRenderError
-{
- kRenderShutDown = 0,
- kRenderPerformanceAlarm = 1
-};
-
-// Feedback class to be implemented by module user
-class VideoRenderFeedback
-{
-public:
- virtual void OnRenderError(const int32_t streamId,
- const VideoRenderError error) = 0;
-
-protected:
- virtual ~VideoRenderFeedback()
- {
- }
-};
-
-// Mobile enums
-enum StretchMode
-{
- kStretchToInsideEdge = 1,
- kStretchToOutsideEdge = 2,
- kStretchMatchWidth = 3,
- kStretchMatchHeight = 4,
- kStretchNone = 5
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc b/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc
deleted file mode 100644
index 75403f8dd53..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc
+++ /dev/null
@@ -1,602 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/video_render_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRender*
-VideoRender::CreateVideoRender(const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType/*=kRenderDefault*/)
-{
- VideoRenderType resultVideoRenderType = videoRenderType;
- if (videoRenderType == kRenderDefault)
- {
- resultVideoRenderType = kRenderExternal;
- }
- return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
- fullscreen);
-}
-
-void VideoRender::DestroyVideoRender(
- VideoRender* module)
-{
- if (module)
- {
- delete module;
- }
-}
-
-ModuleVideoRenderImpl::ModuleVideoRenderImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
-{
-
- // Create platform specific renderer
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl* ptrRenderer(NULL);
- ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
- window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
- default:
- // Error...
- break;
- }
- if (_ptrRenderer)
- {
- if (_ptrRenderer->Init() == -1)
- {
- }
- }
-}
-
-ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
-{
- delete &_moduleCrit;
-
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- delete it->second;
- }
-
- // Delete platform specific renderer
- if (_ptrRenderer)
- {
- VideoRenderType videoRenderType = _ptrRenderer->RenderType();
-
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl
- * ptrRenderer =
- reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-
- default:
- // Error...
- break;
- }
- }
-}
-
-int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
-{
- // Not used
- return 50;
-}
-void ModuleVideoRenderImpl::Process() {}
-
-void*
-ModuleVideoRenderImpl::Window()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _ptrWindow;
-}
-
-int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::Id()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _id;
-}
-
-uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
-
- if (it == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: stream doesn't exist",
- __FUNCTION__);
- return 0;
- }
- assert(it->second != NULL);
- return it->second->IncomingRate();
-}
-
-VideoRenderCallback*
-ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return NULL;
- }
-
- if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
- // The stream already exists...
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream already exists", __FUNCTION__);
- return NULL;
- }
-
- VideoRenderCallback* ptrRenderCallback =
- _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
- right, bottom);
- if (ptrRenderCallback == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Can't create incoming stream in renderer",
- __FUNCTION__);
- return NULL;
- }
-
- // Create platform independant code
- IncomingVideoStream* ptrIncomingStream =
- new IncomingVideoStream(streamId, false);
- ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
- VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
-
- // Store the stream
- _streamRenderMap[streamId] = ptrIncomingStream;
-
- return moduleCallback;
-}
-
-int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- delete item->second;
-
- _ptrRenderer->DeleteIncomingRenderStream(streamId);
-
- _streamRenderMap.erase(item);
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
- const uint32_t streamId,
- VideoRenderCallback* renderObject) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- if (item->second == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get stream", __FUNCTION__);
- return -1;
- }
- item->second->SetExternalCallback(renderObject);
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
-}
-
-uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- return static_cast<uint32_t>(_streamRenderMap.size());
-}
-
-bool ModuleVideoRenderImpl::HasIncomingRenderStream(
- const uint32_t streamId) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- return _streamRenderMap.find(streamId) != _streamRenderMap.end();
-}
-
-int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
- const uint32_t streamId,
- VideoRenderCallback* callbackObj) {
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- // Start the stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Start() == -1)
- {
- return -1;
- }
-
- // Start the HW renderer
- if (_ptrRenderer->StartRender() == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%d): No renderer", __FUNCTION__, streamId);
- return -1;
- }
-
- // Stop the incoming stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Stop() == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::ResetRender()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- int32_t ret = 0;
- // Loop through all incoming streams and reset them
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- if (it->second->Reset() == -1)
- ret = -1;
- }
- return ret;
-}
-
-RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (_ptrRenderer == NULL)
- {
- return kVideoI420;
- }
-
- return _ptrRenderer->PerferedVideoType();
-}
-
-bool ModuleVideoRenderImpl::IsFullScreen()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->FullScreen();
-}
-
-int32_t ModuleVideoRenderImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t ModuleVideoRenderImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->RenderFrameRate(streamId);
-}
-
-int32_t ModuleVideoRenderImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetTransparentBackground(enable);
-}
-
-int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
- backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
- right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
- uint32_t stream_id, int32_t delay_ms) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(stream_id);
- if (item == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
- delay_ms);
- return -1;
- }
-
- assert(item->second != NULL);
- return item->second->SetExpectedRenderDelay(delay_ms);
-}
-
-int32_t ModuleVideoRenderImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
- bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert (item->second != NULL);
- item->second->SetStartImage(videoFrame);
- return 0;
-
-}
-
-int32_t ModuleVideoRenderImpl::SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert(item->second != NULL);
- item->second->SetTimeoutImage(videoFrame, timeout);
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_impl.h b/chromium/third_party/webrtc/modules/video_render/video_render_impl.h
deleted file mode 100644
index 12244a60b82..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_impl.h
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
-
-#include <map>
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class IncomingVideoStream;
-class IVideoRender;
-
-// Class definitions
-class ModuleVideoRenderImpl: public VideoRender
-{
-public:
- /*
- * VideoRenderer constructor/destructor
- */
- ModuleVideoRenderImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~ModuleVideoRenderImpl();
-
- virtual int64_t TimeUntilNextProcess();
- virtual void Process();
-
- /*
- * Returns the render window
- */
- virtual void* Window();
-
- /*
- * Change render window
- */
- virtual int32_t ChangeWindow(void* window);
-
- /*
- * Returns module id
- */
- int32_t Id();
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- /*
- * Add incoming render stream
- */
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
- /*
- * Delete incoming render stream
- */
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- /*
- * Add incoming render callback, used for external rendering
- */
- virtual int32_t
- AddExternalRenderCallback(const uint32_t streamId,
- VideoRenderCallback* renderObject);
-
- /*
- * Get the porperties for an incoming render stream
- */
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
- /*
- * Incoming frame rate for the specified stream.
- */
- virtual uint32_t GetIncomingFrameRate(const uint32_t streamId);
-
- /*
- * Returns the number of incoming streams added to this render module
- */
- virtual uint32_t GetNumIncomingRenderStreams() const;
-
- /*
- * Returns true if this render module has the streamId added, false otherwise.
- */
- virtual bool HasIncomingRenderStream(const uint32_t streamId) const;
-
- /*
- *
- */
- virtual int32_t
- RegisterRawFrameCallback(const uint32_t streamId,
- VideoRenderCallback* callbackObj);
-
- virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
- int32_t delay_ms);
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- /*
- * Starts rendering the specified stream
- */
- virtual int32_t StartRender(const uint32_t streamId);
-
- /*
- * Stops the renderer
- */
- virtual int32_t StopRender(const uint32_t streamId);
-
- /*
- * Sets the renderer in start state, no streams removed.
- */
- virtual int32_t ResetRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- /*
- * Returns the prefered render video type
- */
- virtual RawVideoType PreferredVideoType() const;
-
- /*
- * Returns true if the renderer is in fullscreen mode, otherwise false.
- */
- virtual bool IsFullScreen();
-
- /*
- * Gets screen resolution in pixels
- */
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- /*
- * Get the actual render rate for this stream. I.e rendered frame rate,
- * not frames delivered to the renderer.
- */
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- /*
- * Set cropping of incoming stream
- */
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t FullScreenRender(void* window, const bool enable);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- virtual int32_t SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _moduleCrit;
- void* _ptrWindow;
- bool _fullScreen;
-
- IVideoRender* _ptrRenderer;
- typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
- IncomingVideoStreamMap _streamRenderMap;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_internal.h b/chromium/third_party/webrtc/modules/video_render/video_render_internal.h
deleted file mode 100644
index 0508c1a7087..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_internal.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
-
-#ifdef ANDROID
-#include <jni.h>
-
-namespace webrtc {
-
-// In order to be able to use the internal webrtc video render
-// for android, the jvm objects must be set via this method.
-int32_t SetRenderAndroidVM(JavaVM* javaVM);
-
-} // namespace webrtc
-
-#endif // ANDROID
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc b/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc
deleted file mode 100644
index 2090fce5f86..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc
+++ /dev/null
@@ -1,825 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/video_render_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-#if defined (_WIN32)
-#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
-#define STANDARD_RENDERING kRenderWindows
-
-// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
-// gets defined if WEBRTC_IOS is defined
-#elif defined(WEBRTC_IOS)
-#define STANDARD_RENDERING kRenderiOS
-#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
-#elif defined(WEBRTC_MAC)
-#if defined(COCOA_RENDERING)
-#define STANDARD_RENDERING kRenderCocoa
-#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
-#elif defined(CARBON_RENDERING)
-#define STANDARD_RENDERING kRenderCarbon
-#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
-#endif
-
-#elif defined(WEBRTC_ANDROID)
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
-#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
-#define STANDARD_RENDERING kRenderAndroid
-
-#elif defined(WEBRTC_LINUX)
-#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
-#define STANDARD_RENDERING kRenderX11
-
-#else
-//Other platforms
-#endif
-
-// For external rendering
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-#ifndef STANDARD_RENDERING
-#define STANDARD_RENDERING kRenderExternal
-#endif // STANDARD_RENDERING
-
-namespace webrtc {
-
-VideoRender*
-VideoRender::CreateVideoRender(const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType/*=kRenderDefault*/)
-{
- VideoRenderType resultVideoRenderType = videoRenderType;
- if (videoRenderType == kRenderDefault)
- {
- resultVideoRenderType = STANDARD_RENDERING;
- }
- return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
- fullscreen);
-}
-
-void VideoRender::DestroyVideoRender(
- VideoRender* module)
-{
- if (module)
- {
- delete module;
- }
-}
-
-ModuleVideoRenderImpl::ModuleVideoRenderImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
-{
-
- // Create platform specific renderer
- switch (videoRenderType)
- {
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer;
- ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
-
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
- {
- AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- else
- {
- AndroidSurfaceViewRenderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
-
- }
- break;
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = NULL;
- ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
- if ( ptrRenderer )
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
-
-#else
- // Other platforms
-#endif
- case kRenderExternal:
- {
- VideoRenderExternalImpl* ptrRenderer(NULL);
- ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
- window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
- default:
- // Error...
- break;
- }
- if (_ptrRenderer)
- {
- if (_ptrRenderer->Init() == -1)
- {
- }
- }
-}
-
-ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
-{
- delete &_moduleCrit;
-
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- delete it->second;
- }
-
- // Delete platform specific renderer
- if (_ptrRenderer)
- {
- VideoRenderType videoRenderType = _ptrRenderer->RenderType();
-
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl
- * ptrRenderer =
- reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#else
- //other platforms
-#endif
-
- default:
- // Error...
- break;
- }
- }
-}
-
-int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
-{
- // Not used
- return 50;
-}
-void ModuleVideoRenderImpl::Process() {}
-
-void*
-ModuleVideoRenderImpl::Window()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _ptrWindow;
-}
-
-int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
-{
-
- CriticalSectionScoped cs(&_moduleCrit);
-
-#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
- VideoRenderIosImpl* ptrRenderer;
- ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-#elif defined(WEBRTC_MAC)
-
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
-#if defined(COCOA_RENDERING)
- VideoRenderMacCocoaImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
-#elif defined(CARBON_RENDERING)
- VideoRenderMacCarbonImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
-#endif
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-
-#else
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->ChangeWindow(window);
-
-#endif
-}
-
-int32_t ModuleVideoRenderImpl::Id()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _id;
-}
-
-uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
-
- if (it == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: stream doesn't exist",
- __FUNCTION__);
- return 0;
- }
- assert(it->second != NULL);
- return it->second->IncomingRate();
-}
-
-VideoRenderCallback*
-ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return NULL;
- }
-
- if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
- // The stream already exists...
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream already exists", __FUNCTION__);
- return NULL;
- }
-
- VideoRenderCallback* ptrRenderCallback =
- _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
- right, bottom);
- if (ptrRenderCallback == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Can't create incoming stream in renderer",
- __FUNCTION__);
- return NULL;
- }
-
- // Create platform independant code
- IncomingVideoStream* ptrIncomingStream =
- new IncomingVideoStream(streamId, false);
- ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
- VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
-
- // Store the stream
- _streamRenderMap[streamId] = ptrIncomingStream;
-
- return moduleCallback;
-}
-
-int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- delete item->second;
-
- _ptrRenderer->DeleteIncomingRenderStream(streamId);
-
- _streamRenderMap.erase(item);
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
- const uint32_t streamId,
- VideoRenderCallback* renderObject) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- if (item->second == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get stream", __FUNCTION__);
- return -1;
- }
- item->second->SetExternalCallback(renderObject);
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
-}
-
-uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- return static_cast<uint32_t>(_streamRenderMap.size());
-}
-
-bool ModuleVideoRenderImpl::HasIncomingRenderStream(
- const uint32_t streamId) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- return _streamRenderMap.find(streamId) != _streamRenderMap.end();
-}
-
-int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
- const uint32_t streamId,
- VideoRenderCallback* callbackObj) {
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- // Start the stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Start() == -1)
- {
- return -1;
- }
-
- // Start the HW renderer
- if (_ptrRenderer->StartRender() == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%d): No renderer", __FUNCTION__, streamId);
- return -1;
- }
-
- // Stop the incoming stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Stop() == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::ResetRender()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- int32_t ret = 0;
- // Loop through all incoming streams and reset them
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- if (it->second->Reset() == -1)
- ret = -1;
- }
- return ret;
-}
-
-RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (_ptrRenderer == NULL)
- {
- return kVideoI420;
- }
-
- return _ptrRenderer->PerferedVideoType();
-}
-
-bool ModuleVideoRenderImpl::IsFullScreen()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->FullScreen();
-}
-
-int32_t ModuleVideoRenderImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t ModuleVideoRenderImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->RenderFrameRate(streamId);
-}
-
-int32_t ModuleVideoRenderImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetTransparentBackground(enable);
-}
-
-int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
- backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
- right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
- uint32_t stream_id, int32_t delay_ms) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(stream_id);
- if (item == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
- delay_ms);
- return -1;
- }
-
- assert(item->second != NULL);
- return item->second->SetExpectedRenderDelay(delay_ms);
-}
-
-int32_t ModuleVideoRenderImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
- bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert (item->second != NULL);
- item->second->SetStartImage(videoFrame);
- return 0;
-
-}
-
-int32_t ModuleVideoRenderImpl::SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert(item->second != NULL);
- item->second->SetTimeoutImage(videoFrame, timeout);
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h b/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h
deleted file mode 100644
index 6dbb4fd3cb9..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
-
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-
-// Class definitions
-class IVideoRenderWin
-{
-public:
- /**************************************************************************
- *
- * Constructor/destructor
- *
- ***************************************************************************/
- virtual ~IVideoRenderWin()
- {
- };
-
- virtual int32_t Init() = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * CreateChannel(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t DeleteChannel(const uint32_t streamId) = 0;
-
- virtual int32_t GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) = 0;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender() = 0;
-
- virtual int32_t StopRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual bool IsFullScreen() = 0;
-
- virtual int32_t SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t ChangeWindow(void* window) = 0;
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory) = 0;
-
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc
deleted file mode 100644
index b59b944e483..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc
+++ /dev/null
@@ -1,1160 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Own include file
-#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
-
-// System include files
-#include <windows.h>
-
-// WebRtc include files
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// A structure for our custom vertex type
-struct CUSTOMVERTEX
-{
- FLOAT x, y, z;
- DWORD color; // The vertex color
- FLOAT u, v;
-};
-
-// Our custom FVF, which describes our custom vertex structure
-#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
-
-/*
- *
- * D3D9Channel
- *
- */
-D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
- CriticalSectionWrapper* critSect,
- Trace* trace) :
- _width(0),
- _height(0),
- _pd3dDevice(pd3DDevice),
- _pTexture(NULL),
- _bufferIsUpdated(false),
- _critSect(critSect),
- _streamId(0),
- _zOrder(0),
- _startWidth(0),
- _startHeight(0),
- _stopWidth(0),
- _stopHeight(0)
-{
-
-}
-
-D3D9Channel::~D3D9Channel()
-{
- //release the texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-}
-
-void D3D9Channel::SetStreamSettings(uint16_t streamId,
- uint32_t zOrder,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight)
-{
- _streamId = streamId;
- _zOrder = zOrder;
- _startWidth = startWidth;
- _startHeight = startHeight;
- _stopWidth = stopWidth;
- _stopHeight = stopHeight;
-}
-
-int D3D9Channel::GetStreamSettings(uint16_t streamId,
- uint32_t& zOrder,
- float& startWidth,
- float& startHeight,
- float& stopWidth,
- float& stopHeight)
-{
- streamId = _streamId;
- zOrder = _zOrder;
- startWidth = _startWidth;
- startHeight = _startHeight;
- stopWidth = _stopWidth;
- stopHeight = _stopHeight;
- return 0;
-}
-
-int D3D9Channel::GetTextureWidth()
-{
- return _width;
-}
-
-int D3D9Channel::GetTextureHeight()
-{
- return _height;
-}
-
-// Called from video engine when a the frame size changed
-int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "FrameSizeChange, wifth: %d, height: %d, streams: %d", width,
- height, numberOfStreams);
-
- CriticalSectionScoped cs(_critSect);
- _width = width;
- _height = height;
-
- //clean the previous texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-
- HRESULT ret = E_POINTER;
-
- if (_pd3dDevice)
- ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
- D3DPOOL_MANAGED, &_pTexture, NULL);
-
- if (FAILED(ret))
- {
- _pTexture = NULL;
- return -1;
- }
-
- return 0;
-}
-
-int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(_critSect);
- if (_width != videoFrame.width() || _height != videoFrame.height())
- {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
- {
- return -1;
- }
- }
- return DeliverFrame(videoFrame);
-}
-
-// Called from video engine when a new frame should be rendered.
-int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "DeliverFrame to D3D9Channel");
-
- CriticalSectionScoped cs(_critSect);
-
- // FIXME if _bufferIsUpdated is still true (not be renderred), do we want to
- // update the texture? probably not
- if (_bufferIsUpdated) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "Last frame hasn't been rendered yet. Drop this frame.");
- return -1;
- }
-
- if (!_pd3dDevice) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "D3D for rendering not initialized.");
- return -1;
- }
-
- if (!_pTexture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Texture for rendering not initialized.");
- return -1;
- }
-
- D3DLOCKED_RECT lr;
-
- if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock a texture in D3D9 Channel.");
- return -1;
- }
- UCHAR* pRect = (UCHAR*) lr.pBits;
-
- ConvertFromI420(videoFrame, kARGB, 0, pRect);
-
- if (FAILED(_pTexture->UnlockRect(0))) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to unlock a texture in D3D9 Channel.");
- return -1;
- }
-
- _bufferIsUpdated = true;
- return 0;
-}
-
-// Called by d3d channel owner to indicate the frame/texture has been rendered off
-int D3D9Channel::RenderOffFrame()
-{
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "Frame has been rendered to the screen.");
- CriticalSectionScoped cs(_critSect);
- _bufferIsUpdated = false;
- return 0;
-}
-
-// Called by d3d channel owner to check if the texture is updated
-int D3D9Channel::IsUpdated(bool& isUpdated)
-{
- CriticalSectionScoped cs(_critSect);
- isUpdated = _bufferIsUpdated;
- return 0;
-}
-
-// Called by d3d channel owner to get the texture
-LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
-{
- CriticalSectionScoped cs(_critSect);
- return _pTexture;
-}
-
-int D3D9Channel::ReleaseTexture()
-{
- CriticalSectionScoped cs(_critSect);
-
- //release the texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
- _pd3dDevice = NULL;
- return 0;
-}
-
-int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
-{
- CriticalSectionScoped cs(_critSect);
-
- _pd3dDevice = pd3DDevice;
-
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-
- HRESULT ret;
-
- ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
- D3DPOOL_MANAGED, &_pTexture, NULL);
-
- if (FAILED(ret))
- {
- _pTexture = NULL;
- return -1;
- }
-
- return 0;
-}
-
-/*
- *
- * VideoRenderDirect3D9
- *
- */
-VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
- HWND hWnd,
- bool fullScreen) :
- _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
- _trace(trace),
- _hWnd(hWnd),
- _fullScreen(fullScreen),
- _pTextureLogo(NULL),
- _pVB(NULL),
- _pd3dDevice(NULL),
- _pD3D(NULL),
- _d3dChannels(),
- _d3dZorder(),
- _screenUpdateEvent(NULL),
- _logoLeft(0),
- _logoTop(0),
- _logoRight(0),
- _logoBottom(0),
- _pd3dSurface(NULL),
- _totalMemory(0),
- _availableMemory(0)
-{
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
- _screenUpdateEvent = EventTimerWrapper::Create();
- SetRect(&_originalHwndRect, 0, 0, 0, 0);
-}
-
-VideoRenderDirect3D9::~VideoRenderDirect3D9()
-{
- //NOTE: we should not enter CriticalSection in here!
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- }
- delete _screenUpdateEvent;
-
- //close d3d device
- CloseDevice();
-
- // Delete all channels
- std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- delete it->second;
- it = _d3dChannels.erase(it);
- }
- // Clean the zOrder map
- _d3dZorder.clear();
-
- if (_fullScreen)
- {
- // restore hwnd to original size and position
- ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
- _originalHwndRect.top, _originalHwndRect.right
- - _originalHwndRect.left,
- _originalHwndRect.bottom - _originalHwndRect.top,
- SWP_FRAMECHANGED);
- ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
- | RDW_ERASE);
- ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
- | RDW_ERASE);
- }
-
- delete &_refD3DCritsect;
-}
-
-DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
-{
- D3DCAPS9 caps;
- DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
- if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
- &caps)))
- {
- if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
- == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
- {
- dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
- }
- }
- return dwVertexProcessing;
-}
-
-int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
- D3DPRESENT_PARAMETERS* pd3dpp)
-{
- // initialize Direct3D
- if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
- return -1;
- }
-
- // determine what type of vertex processing to use based on the device capabilities
- DWORD dwVertexProcessing = GetVertexProcessingCaps();
-
- // get the display mode
- D3DDISPLAYMODE d3ddm;
- _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
- pd3dpp->BackBufferFormat = d3ddm.Format;
-
- // create the D3D device
- if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
- dwVertexProcessing | D3DCREATE_MULTITHREADED
- | D3DCREATE_FPU_PRESERVE, pd3dpp,
- &_pd3dDevice)))
- {
- //try the ref device
- if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
- hWnd, dwVertexProcessing
- | D3DCREATE_MULTITHREADED
- | D3DCREATE_FPU_PRESERVE,
- pd3dpp, &_pd3dDevice)))
- {
- return -1;
- }
- }
-
- return 0;
-}
-
-int VideoRenderDirect3D9::ResetDevice()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice");
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- //release the channel texture
- std::map<int, D3D9Channel*>::iterator it;
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- if (it->second)
- {
- it->second->ReleaseTexture();
- }
- it++;
- }
-
- //close d3d device
- if (CloseDevice() != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
- return -1;
- }
-
- //reinit d3d device
- if (InitDevice() != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
- return -1;
- }
-
- //recreate channel texture
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- if (it->second)
- {
- it->second->RecreateTexture(_pd3dDevice);
- }
- it++;
- }
-
- return 0;
-}
-
-int VideoRenderDirect3D9::InitDevice()
-{
- // Set up the structure used to create the D3DDevice
- ZeroMemory(&_d3dpp, sizeof(_d3dpp));
- _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
- _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
- if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::InitDevice Could not get window size");
- return -1;
- }
- if (!_fullScreen)
- {
- _winWidth = _originalHwndRect.right - _originalHwndRect.left;
- _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
- _d3dpp.Windowed = TRUE;
- _d3dpp.BackBufferHeight = 0;
- _d3dpp.BackBufferWidth = 0;
- }
- else
- {
- _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
- _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
- _d3dpp.Windowed = FALSE;
- _d3dpp.BackBufferWidth = _winWidth;
- _d3dpp.BackBufferHeight = _winHeight;
- _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- }
-
- if (InitializeD3D(_hWnd, &_d3dpp) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
- return -1;
- }
-
- // Turn off culling, so we see the front and back of the triangle
- _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
-
- // Turn off D3D lighting, since we are providing our own vertex colors
- _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
-
- // Settings for alpha blending
- _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
- _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
- _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
-
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR );
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR );
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR );
-
- // Initialize Vertices
- CUSTOMVERTEX Vertices[] = {
- //front
- { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
- 0xffffffff, 0, 0 },
- { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
- 0xffffffff, 1, 0 } };
-
- // Create the vertex buffer.
- if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
- D3DFVF_CUSTOMVERTEX,
- D3DPOOL_DEFAULT, &_pVB, NULL )))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to create the vertex buffer.");
- return -1;
- }
-
- // Now we fill the vertex buffer.
- VOID* pVertices;
- if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock the vertex buffer.");
- return -1;
- }
- memcpy(pVertices, Vertices, sizeof(Vertices));
- _pVB->Unlock();
-
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::Init()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::Init");
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- // Start rendering thread...
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created");
- return -1;
- }
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- DEVMODE dm;
- // initialize the DEVMODE structure
- ZeroMemory(&dm, sizeof(dm));
- dm.dmSize = sizeof(dm);
- if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm))
- {
- monitorFreq = dm.dmDisplayFrequency;
- }
- _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);
-
- return InitDevice();
-}
-
-int32_t VideoRenderDirect3D9::ChangeWindow(void* window)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return -1;
-}
-
-int VideoRenderDirect3D9::UpdateRenderSurface()
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- // Check if there are any updated buffers
- bool updated = false;
- std::map<int, D3D9Channel*>::iterator it;
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
-
- D3D9Channel* channel = it->second;
- channel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
- //nothing is updated, continue
- if (!updated)
- return -1;
-
- // Clear the backbuffer to a black color
- _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f,
- 0);
-
- // Begin the scene
- if (SUCCEEDED(_pd3dDevice->BeginScene()))
- {
- _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX));
- _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX);
-
- //draw all the channels
- //get texture from the channels
- LPDIRECT3DTEXTURE9 textureFromChannel = NULL;
- DWORD textureWidth, textureHeight;
-
- std::multimap<int, unsigned int>::reverse_iterator it;
- it = _d3dZorder.rbegin();
- while (it != _d3dZorder.rend())
- {
- // loop through all channels and streams in Z order
- int channel = it->second & 0x0000ffff;
-
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel);
- if (ddIt != _d3dChannels.end())
- {
- // found the channel
- D3D9Channel* channelObj = ddIt->second;
- if (channelObj)
- {
- textureFromChannel = channelObj->GetTexture();
- textureWidth = channelObj->GetTextureWidth();
- textureHeight = channelObj->GetTextureHeight();
-
- uint32_t zOrder;
- float startWidth, startHeight, stopWidth, stopHeight;
- channelObj->GetStreamSettings(0, zOrder, startWidth,
- startHeight, stopWidth,
- stopHeight);
-
- //draw the video stream
- UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight,
- stopWidth, stopHeight);
- _pd3dDevice->SetTexture(0, textureFromChannel);
- _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
-
- //Notice channel that this frame as been rendered
- channelObj->RenderOffFrame();
- }
- }
- it++;
- }
-
- //draw the logo
- if (_pTextureLogo)
- {
- UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight,
- _logoBottom);
- _pd3dDevice->SetTexture(0, _pTextureLogo);
- _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
- }
-
- // End the scene
- _pd3dDevice->EndScene();
- }
-
- // Present the backbuffer contents to the display
- _pd3dDevice->Present(NULL, NULL, NULL, NULL );
-
- return 0;
-}
-
-//set the alpha value of the pixal with a particular colorkey as 0
-int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
- DDCOLORKEY* transparentColorKey,
- DWORD width,
- DWORD height)
-{
- D3DLOCKED_RECT lr;
- if (!pTexture)
- return -1;
-
- CriticalSectionScoped cs(&_refD3DCritsect);
- if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD)))
- {
- for (DWORD y = 0; y < height; y++)
- {
- DWORD dwOffset = y * width;
-
- for (DWORD x = 0; x < width; x)
- {
- DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
- if ((temp & 0x00FFFFFF)
- == transparentColorKey->dwColorSpaceLowValue)
- {
- temp &= 0x00FFFFFF;
- }
- else
- {
- temp |= 0xFF000000;
- }
- ((DWORD*) lr.pBits)[dwOffset + x] = temp;
- x++;
- }
- }
- pTexture->UnlockRect(0);
- return 0;
- }
- return -1;
-}
-
-/*
- *
- * Rendering process
- *
- */
-bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderDirect3D9*> (obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderDirect3D9::ScreenUpdateProcess()
-{
- _screenUpdateEvent->Wait(100);
-
- if (!_screenUpdateThread)
- {
- //stop the thread
- return false;
- }
- if (!_pd3dDevice)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "d3dDevice not created.");
- return true;
- }
-
- HRESULT hr = _pd3dDevice->TestCooperativeLevel();
-
- if (SUCCEEDED(hr))
- {
- UpdateRenderSurface();
- }
-
- if (hr == D3DERR_DEVICELOST)
- {
- //Device is lost and cannot be reset yet
-
- }
- else if (hr == D3DERR_DEVICENOTRESET)
- {
- //Lost but we can reset it now
- //Note: the standard way is to call Reset, however for some reason doesn't work here.
- //so we will release the device and create it again.
- ResetDevice();
- }
-
- return true;
-}
-
-int VideoRenderDirect3D9::CloseDevice()
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::CloseDevice");
-
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
-
- if (_pVB != NULL)
- {
- _pVB->Release();
- _pVB = NULL;
- }
-
- if (_pd3dDevice != NULL)
- {
- _pd3dDevice->Release();
- _pd3dDevice = NULL;
- }
-
- if (_pD3D != NULL)
- {
- _pD3D->Release();
- _pD3D = NULL;
- }
-
- if (_pd3dSurface != NULL)
- _pd3dSurface->Release();
- return 0;
-}
-
-D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return NULL;
- }
- return ddobj;
-}
-
-int32_t VideoRenderDirect3D9::DeleteChannel(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
-
- std::multimap<int, unsigned int>::iterator it;
- it = _d3dZorder.begin();
- while (it != _d3dZorder.end())
- {
- if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
- {
- it = _d3dZorder.erase(it);
- break;
- }
- it++;
- }
-
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(streamId & 0x0000ffff);
- if (ddIt != _d3dChannels.end())
- {
- delete ddIt->second;
- _d3dChannels.erase(ddIt);
- return 0;
- }
- return -1;
-}
-
-VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const uint32_t channel,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- //FIXME this should be done in VideoAPIWindows? stop the frame deliver first
- //remove the old channel
- DeleteChannel(channel);
-
- D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
- &_refD3DCritsect, _trace);
- d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
-
- // store channel
- _d3dChannels[channel & 0x0000ffff] = d3dChannel;
-
- // store Z order
- // default streamID is 0
- _d3dZorder.insert(
- std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
-
- return d3dChannel;
-}
-
-int32_t VideoRenderDirect3D9::GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return -1;
- }
- // Only allow one stream per channel, demuxing is
- return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
-}
-
-int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
- int offset,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight)
-{
- if (pVB == NULL)
- return -1;
-
- float left, right, top, bottom;
-
- //update the vertice buffer
- //0,1 => -1,1
- left = startWidth * 2 - 1;
- right = stopWidth * 2 - 1;
-
- //0,1 => 1,-1
- top = 1 - startHeight * 2;
- bottom = 1 - stopHeight * 2;
-
- CUSTOMVERTEX newVertices[] = {
- //logo
- { left, bottom, 0.0f, 0xffffffff, 0, 1 }, { left, top, 0.0f,
- 0xffffffff, 0, 0 },
- { right, bottom, 0.0f, 0xffffffff, 1, 1 }, { right, top, 0.0f,
- 0xffffffff, 1, 0 }, };
- // Now we fill the vertex buffer.
- VOID* pVertices;
- if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
- (void**) &pVertices, 0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock the vertex buffer.");
- return -1;
- }
- memcpy(pVertices, newVertices, sizeof(newVertices));
- pVB->Unlock();
-
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::StartRender()
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::StopRender()
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-bool VideoRenderDirect3D9::IsFullScreen()
-{
- return _fullScreen;
-}
-
-int32_t VideoRenderDirect3D9::SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetTransparentBackground(
- const bool enable)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom)
-{
- if (!bitMap)
- {
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
- return 0;
- }
-
- // sanity
- if (left > 1.0f || left < 0.0f ||
- top > 1.0f || top < 0.0f ||
- right > 1.0f || right < 0.0f ||
- bottom > 1.0f || bottom < 0.0f)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D SetBitmap invalid parameter");
- return -1;
- }
-
- if ((bottom <= top) || (right <= left))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D SetBitmap invalid parameter");
- return -1;
- }
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- unsigned char* srcPtr;
- HGDIOBJ oldhand;
- BITMAPINFO pbi;
- BITMAP bmap;
- HDC hdcNew;
- hdcNew = CreateCompatibleDC(0);
- // Fill out the BITMAP structure.
- GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
- //Select the bitmap handle into the new device context.
- oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
- // we are done with this object
- DeleteObject(oldhand);
- pbi.bmiHeader.biSize = 40;
- pbi.bmiHeader.biWidth = bmap.bmWidth;
- pbi.bmiHeader.biHeight = bmap.bmHeight;
- pbi.bmiHeader.biPlanes = 1;
- pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
- pbi.bmiHeader.biCompression = BI_RGB;
- pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
- srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
- // the original un-stretched image in RGB24
- int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight, srcPtr, &pbi,
- DIB_RGB_COLORS);
- if (pixelHeight == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D failed to GetDIBits in SetBitmap");
- delete[] srcPtr;
- return -1;
- }
- DeleteDC(hdcNew);
- if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D failed to SetBitmap invalid bit depth");
- delete[] srcPtr;
- return -1;
- }
-
- HRESULT ret;
- //release the previous logo texture
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
- ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
- D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
- &_pTextureLogo, NULL);
- if (FAILED(ret))
- {
- _pTextureLogo = NULL;
- delete[] srcPtr;
- return -1;
- }
- if (!_pTextureLogo)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Texture for rendering not initialized.");
- delete[] srcPtr;
- return -1;
- }
-
- D3DLOCKED_RECT lr;
- if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
- {
- delete[] srcPtr;
- return -1;
- }
- unsigned char* dstPtr = (UCHAR*) lr.pBits;
- int pitch = bmap.bmWidth * 4;
-
- if (pbi.bmiHeader.biBitCount == 24)
- {
- ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
- }
- else
- {
- unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
- for (int i = 0; i < bmap.bmHeight; ++i)
- {
- memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
- srcTmp -= bmap.bmWidth * 4;
- dstPtr += pitch;
- }
- }
-
- delete[] srcPtr;
- if (FAILED(_pTextureLogo->UnlockRect(0)))
- {
- return -1;
- }
-
- if (colorKey)
- {
- DDCOLORKEY* ddColorKey =
- static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
- SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
- bmap.bmHeight);
- }
-
- //update the vertice buffer
- //0,1 => -1,1
- _logoLeft = left;
- _logoRight = right;
-
- //0,1 => 1,-1
- _logoTop = top;
- _logoBottom = bottom;
-
- return 0;
-
-}
-
-int32_t VideoRenderDirect3D9::GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory)
-{
- totalMemory = _totalMemory;
- availableMemory = _availableMemory;
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return -1;
- }
- // Only allow one stream per channel, demuxing is
- ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
-
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h b/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h
deleted file mode 100644
index eaa8c147e2e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
-
-#include <memory>
-
-#include "webrtc/modules/video_render/windows/i_video_render_win.h"
-
-#include <d3d9.h>
-#include <ddraw.h>
-
-#include <Map>
-
-// Added
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventTimerWrapper;
-class Trace;
-
-class D3D9Channel: public VideoRenderCallback
-{
-public:
- D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
- CriticalSectionWrapper* critSect, Trace* trace);
-
- virtual ~D3D9Channel();
-
- // Inherited from VideoRencerCallback, called from VideoAPI class.
- // Called when the incomming frame size and/or number of streams in mix changes
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-
- // A new frame is delivered.
- virtual int DeliverFrame(const VideoFrame& videoFrame);
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- // Called to check if the video frame is updated.
- int IsUpdated(bool& isUpdated);
- // Called after the video frame has been render to the screen
- int RenderOffFrame();
- // Called to get the texture that contains the video frame
- LPDIRECT3DTEXTURE9 GetTexture();
- // Called to get the texture(video frame) size
- int GetTextureWidth();
- int GetTextureHeight();
- //
- void SetStreamSettings(uint16_t streamId,
- uint32_t zOrder,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight);
- int GetStreamSettings(uint16_t streamId,
- uint32_t& zOrder,
- float& startWidth,
- float& startHeight,
- float& stopWidth,
- float& stopHeight);
-
- int ReleaseTexture();
- int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
-
-protected:
-
-private:
- //critical section passed from the owner
- CriticalSectionWrapper* _critSect;
- LPDIRECT3DDEVICE9 _pd3dDevice;
- LPDIRECT3DTEXTURE9 _pTexture;
-
- bool _bufferIsUpdated;
- // the frame size
- int _width;
- int _height;
- //sream settings
- //TODO support multiple streams in one channel
- uint16_t _streamId;
- uint32_t _zOrder;
- float _startWidth;
- float _startHeight;
- float _stopWidth;
- float _stopHeight;
-};
-
-class VideoRenderDirect3D9: IVideoRenderWin
-{
-public:
- VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
- ~VideoRenderDirect3D9();
-
-public:
- //IVideoRenderWin
-
- /**************************************************************************
- *
- * Init
- *
- ***************************************************************************/
- virtual int32_t Init();
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback
- * CreateChannel(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteChannel(const uint32_t streamId);
-
- virtual int32_t GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom);
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual bool IsFullScreen();
-
- virtual int32_t SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ChangeWindow(void* window);
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-public:
- // Get a channel by channel id
- D3D9Channel* GetD3DChannel(int channel);
- int UpdateRenderSurface();
-
-protected:
- // The thread rendering the screen
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
-
-private:
- // Init/close the d3d device
- int InitDevice();
- int CloseDevice();
-
- // Transparent related functions
- int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
- DDCOLORKEY* transparentColorKey,
- DWORD width,
- DWORD height);
-
- CriticalSectionWrapper& _refD3DCritsect;
- Trace* _trace;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- EventTimerWrapper* _screenUpdateEvent;
-
- HWND _hWnd;
- bool _fullScreen;
- RECT _originalHwndRect;
- //FIXME we probably don't need this since all the information can be get from _d3dChannels
- int _channel;
- //Window size
- UINT _winWidth;
- UINT _winHeight;
-
- // Device
- LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
- LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
- LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
- LPDIRECT3DTEXTURE9 _pTextureLogo;
-
- std::map<int, D3D9Channel*> _d3dChannels;
- std::multimap<int, unsigned int> _d3dZorder;
-
- // The position where the logo will be placed
- float _logoLeft;
- float _logoTop;
- float _logoRight;
- float _logoBottom;
-
- typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
- LPDIRECT3DSURFACE9 _pd3dSurface;
-
- DWORD GetVertexProcessingCaps();
- int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
-
- D3DPRESENT_PARAMETERS _d3dpp;
- int ResetDevice();
-
- int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
- float startWidth, float startHeight,
- float stopWidth, float stopHeight);
-
- //code for providing graphics settings
- DWORD _totalMemory;
- DWORD _availableMemory;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc b/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc
deleted file mode 100644
index 042d7fdfa33..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#ifdef DIRECT3D9_RENDERING
-#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
-#endif
-
-#include <tchar.h>
-
-namespace webrtc {
-
-VideoRenderWindowsImpl::VideoRenderWindowsImpl(const int32_t id,
- const VideoRenderType videoRenderType, void* window, const bool fullscreen)
- : _renderWindowsCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
- _prtWindow(window),
- _fullscreen(fullscreen),
- _renderMethod(kVideoRenderWinD3D9),
- _ptrRendererWin(NULL) {
-}
-
-VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
-{
- delete &_renderWindowsCritsect;
- if (_ptrRendererWin)
- {
- delete _ptrRendererWin;
- _ptrRendererWin = NULL;
- }
-}
-
-int32_t VideoRenderWindowsImpl::Init()
-{
- // Create the win renderer
- switch (_renderMethod)
- {
- case kVideoRenderWinD3D9:
- {
-#ifdef DIRECT3D9_RENDERING
- VideoRenderDirect3D9* ptrRenderer;
- ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _prtWindow, _fullscreen);
- if (ptrRenderer == NULL)
- {
- break;
- }
- _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
-#else
- return NULL;
-#endif //DIRECT3D9_RENDERING
- }
- break;
- default:
- break;
- }
-
- //Init renderer
- if (_ptrRendererWin)
- return _ptrRendererWin->Init();
- else
- return -1;
-}
-
-int32_t VideoRenderWindowsImpl::ChangeWindow(void* window)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- if (!_ptrRendererWin)
- {
- return -1;
- }
- else
- {
- return _ptrRendererWin->ChangeWindow(window);
- }
-}
-
-VideoRenderCallback*
-VideoRenderWindowsImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- VideoRenderCallback* renderCallback = NULL;
-
- if (!_ptrRendererWin)
- {
- }
- else
- {
- renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
- top, right, bottom);
- }
-
- return renderCallback;
-}
-
-int32_t VideoRenderWindowsImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->DeleteChannel(streamId);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- zOrder = 0;
- left = 0;
- top = 0;
- right = 0;
- bottom = 0;
-
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
- top, right, bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::StartRender()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->StartRender();
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::StopRender()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->StopRender();
- }
- return error;
-}
-
-VideoRenderType VideoRenderWindowsImpl::RenderType()
-{
- return kRenderWindows;
-}
-
-RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderWindowsImpl::FullScreen()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- bool fullscreen = false;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- fullscreen = _ptrRendererWin->IsFullScreen();
- }
- return fullscreen;
-}
-
-int32_t VideoRenderWindowsImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- if (_ptrRendererWin)
- {
- return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
- availableGraphicsMemory);
- }
-
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return -1;
-}
-
-int32_t VideoRenderWindowsImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- screenWidth = 0;
- screenHeight = 0;
- return 0;
-}
-
-uint32_t VideoRenderWindowsImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- return 0;
-}
-
-int32_t VideoRenderWindowsImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
- bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
- top, right, bottom);
- }
-
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetTransparentBackground(
- const bool enable)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetTransparentBackground(enable);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetText(textId, text, textLength,
- textColorRef, backgroundColorRef,
- left, top, right, bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
- top, right, bottom);
- }
- return error;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h b/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h
deleted file mode 100644
index aaa3f81fc7e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
-
-#include <Winerror.h>
-#include <dxdiag.h>
-
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/windows/i_video_render_win.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-#define EXPAND(x) x, sizeof(x)/sizeof(TCHAR)
-
-enum VideoRenderWinMethod {
- kVideoRenderWinD3D9 = 0,
-};
-
-// Class definitions
-class VideoRenderWindowsImpl: IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderWindowsImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderWindowsImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-private:
- CriticalSectionWrapper& _renderWindowsCritsect;
-
- void* _prtWindow;
- bool _fullscreen;
-
- VideoRenderWinMethod _renderMethod;
- IVideoRenderWin* _ptrRendererWin;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_